Mirror of https://github.com/arsenetar/dupeguru.git
Synced 2026-01-27 09:01:39 +00:00

Compare commits: 4.1.1...809116c764 (72 commits)
Commits:

809116c764, 83f401595d, 814d145366, efb76c7686, 47dbe805bb, f11fccc889,
2e13c4ccb5, da72ffd1fd, 2c9437bef4, f9085386a6, d576a7043c, 1ef5f56158,
f9316de244, 0189c29f47, b4fa1d68f0, 16df882481, 58c04ff9ad, 6b8f85e39a,
2fff1a3436, a685524dd5, 74918e2c56, 18895d983b, fe720208ea, 091d9e9239,
5a4958cff9, be10b462fc, d62b13bcdb, 06eca11f0b, 2879f18e0d, 3ee21771f9,
c0ba6fb57a, bc942b8263, ffe6b7047c, 9446f37fad, af19660c18, 99ad297906,
e11f996dfc, e95306e58f, 891a875990, 545a5a75fb, 7b764f183e, fdc8a17d26,
cb3bbbec6e, c51a82a2ce, 0cd8f5e948, 9c09607c08, 3bd342770c, 14b456dcf9,
3dccb686e2, 0db66baace, e3828ae2ca, 23c59787e5, 2f8d603251, a51f263632,
718ca5b313, 277bc3fbb8, e07dfd5955, 4641bd6ec9, a6f83ad3d7, ab8750eedb,
22033211d6, 0b46ca2222, 72e0f76242, 65c1d463f8, e6c791ab0a, 78f5088101,
095df5eb95, f1ae478433, c4dcfd3d4b, 0840104edf, 6b4b436251, d18b8c10ec
.github/workflows/codeql-analysis.yml (vendored, new file, 50 lines)
@@ -0,0 +1,50 @@
+name: "CodeQL"
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    # The branches below must be a subset of the branches above
+    branches: [ master ]
+  schedule:
+    - cron: '24 20 * * 2'
+
+jobs:
+  analyze:
+    name: Analyze
+    runs-on: ubuntu-latest
+    permissions:
+      actions: read
+      contents: read
+      security-events: write
+
+    strategy:
+      fail-fast: false
+      matrix:
+        language: [ 'cpp', 'python' ]
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v2
+
+      # Initializes the CodeQL tools for scanning.
+      - name: Initialize CodeQL
+        uses: github/codeql-action/init@v1
+        with:
+          languages: ${{ matrix.language }}
+          # If you wish to specify custom queries, you can do so here or in a config file.
+          # By default, queries listed here will override any specified in a config file.
+          # Prefix the list here with "+" to use these queries and those in the config file.
+          # queries: ./path/to/local/query, your-org/your-repo/queries@main
+      - if: matrix.language == 'cpp'
+        name: Build Cpp
+        run: |
+          sudo apt-get update
+          sudo apt-get install python3-pyqt5
+          make modules
+      - if: matrix.language == 'python'
+        name: Autobuild
+        uses: github/codeql-action/autobuild@v1
+      # Analysis
+      - name: Perform CodeQL Analysis
+        uses: github/codeql-action/analyze@v1
.github/workflows/default.yml (vendored, new file, 74 lines)
@@ -0,0 +1,74 @@
+# Workflow lints, and checks format in parallel then runs tests on all platforms
+
+name: Default CI/CD
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.9
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.9
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt -r requirements-extra.txt
+      - name: Lint with flake8
+        run: |
+          flake8 .
+  format:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.9
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.9
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt -r requirements-extra.txt
+      - name: Check format with black
+        run: |
+          black .
+  test:
+    needs: [lint, format]
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [ubuntu-latest, macos-latest, windows-latest]
+        python-version: [3.6, 3.7, 3.8, 3.9]
+        exclude:
+          - os: macos-latest
+            python-version: 3.6
+          - os: macos-latest
+            python-version: 3.7
+          - os: windows-latest
+            python-version: 3.6
+          - os: windows-latest
+            python-version: 3.7
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt -r requirements-extra.txt
+      - name: Build python modules
+        run: |
+          python build.py --modules
+      - name: Run tests
+        run: |
+          pytest core hscommon
.gitignore (vendored, 1 addition)
@@ -1,5 +1,6 @@
 .DS_Store
 __pycache__
+*.egg-info
 *.so
 *.mo
 *.waf*
.travis.yml (deleted, 27 lines)
@@ -1,27 +0,0 @@
-sudo: false
-language: python
-install:
-  - pip3 install -r requirements.txt -r requirements-extra.txt
-script: tox
-matrix:
-  include:
-    - os: "linux"
-      dist: "xenial"
-      python: "3.6"
-    - os: "linux"
-      dist: "xenial"
-      python: "3.7"
-    - os: "linux"
-      dist: "focal"
-      python: "3.8"
-    - os: "linux"
-      dist: "focal"
-      python: "3.9"
-    - os: "windows"
-      language: shell
-      python: "3.8"
-      env: "PATH=/c/python38:/c/python38/Scripts:$PATH"
-      before_install:
-        - choco install python --version=3.8.6
-        - cp /c/python38/python.exe /c/python38/python3.exe
-      script: tox -e py38
.tx/config (11 changes)
@@ -1,21 +1,26 @@
 [main]
 host = https://www.transifex.com
 
-[dupeguru.core]
+[dupeguru-1.core]
 file_filter = locale/<lang>/LC_MESSAGES/core.po
 source_file = locale/core.pot
 source_lang = en
 type = PO
 
-[dupeguru.columns]
+[dupeguru-1.columns]
 file_filter = locale/<lang>/LC_MESSAGES/columns.po
 source_file = locale/columns.pot
 source_lang = en
 type = PO
 
-[dupeguru.ui]
+[dupeguru-1.ui]
 file_filter = locale/<lang>/LC_MESSAGES/ui.po
 source_file = locale/ui.pot
 source_lang = en
 type = PO
 
+[dupeguru-1.qtlib]
+file_filter = qtlib/locale/<lang>/LC_MESSAGES/qtlib.po
+source_file = qtlib/locale/qtlib.pot
+source_lang = en
+type = PO
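Note: the resource slugs change from dupeguru.* to dupeguru-1.*, apparently pointing the tx client at a re-created dupeguru-1 Transifex project, and a fourth resource is added for qtlib's catalog. Each file_filter tells the client where a pulled .po file lands; a minimal illustration of that mapping (the language codes here are hypothetical examples, not a list of supported locales):

    # How a .tx file_filter expands per language; <lang> is replaced by the
    # Transifex client with each language code it pulls.
    file_filter = "qtlib/locale/<lang>/LC_MESSAGES/qtlib.po"
    for lang in ("fr", "de", "ja"):
        print(file_filter.replace("<lang>", lang))
    # qtlib/locale/fr/LC_MESSAGES/qtlib.po
    # qtlib/locale/de/LC_MESSAGES/qtlib.po
    # qtlib/locale/ja/LC_MESSAGES/qtlib.po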
MANIFEST.in (new file, 6 lines)
@@ -0,0 +1,6 @@
+recursive-include core *.h
+recursive-include core *.m
+include run.py
+graft locale
+graft help
+graft qtlib/locale
build.py (135 changes)
@@ -4,19 +4,16 @@
 # which should be included with this package. The terms are also available at
 # http://www.gnu.org/licenses/gpl-3.0.html
 
-import os
-import os.path as op
+from pathlib import Path
+import sys
 from optparse import OptionParser
 import shutil
-from pathlib import Path
 
-from setuptools import setup, Extension
-
+from setuptools import sandbox
 from hscommon import sphinxgen
 from hscommon.build import (
     add_to_pythonpath,
     print_and_do,
-    move_all,
     fix_qt_resource_file,
 )
 from hscommon import loc
@@ -31,12 +28,8 @@ def parse_args():
         dest="clean",
         help="Clean build folder before building",
     )
-    parser.add_option(
-        "--doc", action="store_true", dest="doc", help="Build only the help file"
-    )
-    parser.add_option(
-        "--loc", action="store_true", dest="loc", help="Build only localization"
-    )
+    parser.add_option("--doc", action="store_true", dest="doc", help="Build only the help file")
+    parser.add_option("--loc", action="store_true", dest="loc", help="Build only localization")
     parser.add_option(
         "--updatepot",
         action="store_true",
@@ -61,26 +54,20 @@ def parse_args():
         dest="modules",
         help="Build the python modules.",
    )
-    parser.add_option(
-        "--importpo",
-        action="store_true",
-        dest="importpo",
-        help="Import all PO files downloaded from transifex.",
-    )
     (options, args) = parser.parse_args()
     return options
 
 
 def build_help():
     print("Generating Help")
-    current_path = op.abspath(".")
-    help_basepath = op.join(current_path, "help", "en")
-    help_destpath = op.join(current_path, "build", "help")
-    changelog_path = op.join(current_path, "help", "changelog")
+    current_path = Path(".").absolute()
+    help_basepath = current_path.joinpath("help", "en")
+    help_destpath = current_path.joinpath("build", "help")
+    changelog_path = current_path.joinpath("help", "changelog")
     tixurl = "https://github.com/arsenetar/dupeguru/issues/{}"
     confrepl = {"language": "en"}
-    changelogtmpl = op.join(current_path, "help", "changelog.tmpl")
-    conftmpl = op.join(current_path, "help", "conf.tmpl")
+    changelogtmpl = current_path.joinpath("help", "changelog.tmpl")
+    conftmpl = current_path.joinpath("help", "conf.tmpl")
     sphinxgen.gen(
         help_basepath,
         help_destpath,
@@ -93,102 +80,51 @@ def build_help():
 
 
 def build_qt_localizations():
-    loc.compile_all_po(op.join("qtlib", "locale"))
-    loc.merge_locale_dir(op.join("qtlib", "locale"), "locale")
+    loc.compile_all_po(Path("qtlib", "locale"))
+    loc.merge_locale_dir(Path("qtlib", "locale"), "locale")
 
 
 def build_localizations():
     loc.compile_all_po("locale")
     build_qt_localizations()
-    locale_dest = op.join("build", "locale")
-    if op.exists(locale_dest):
+    locale_dest = Path("build", "locale")
+    if locale_dest.exists():
         shutil.rmtree(locale_dest)
-    shutil.copytree(
-        "locale", locale_dest, ignore=shutil.ignore_patterns("*.po", "*.pot")
-    )
+    shutil.copytree("locale", locale_dest, ignore=shutil.ignore_patterns("*.po", "*.pot"))
 
 
 def build_updatepot():
     print("Building .pot files from source files")
     print("Building core.pot")
-    loc.generate_pot(["core"], op.join("locale", "core.pot"), ["tr"])
+    loc.generate_pot(["core"], Path("locale", "core.pot"), ["tr"])
     print("Building columns.pot")
-    loc.generate_pot(["core"], op.join("locale", "columns.pot"), ["coltr"])
+    loc.generate_pot(["core"], Path("locale", "columns.pot"), ["coltr"])
     print("Building ui.pot")
     # When we're not under OS X, we don't want to overwrite ui.pot because it contains Cocoa locs
     # We want to merge the generated pot with the old pot in the most preserving way possible.
-    ui_packages = ["qt", op.join("cocoa", "inter")]
-    loc.generate_pot(ui_packages, op.join("locale", "ui.pot"), ["tr"], merge=True)
+    ui_packages = ["qt", Path("cocoa", "inter")]
+    loc.generate_pot(ui_packages, Path("locale", "ui.pot"), ["tr"], merge=True)
     print("Building qtlib.pot")
-    loc.generate_pot(["qtlib"], op.join("qtlib", "locale", "qtlib.pot"), ["tr"])
+    loc.generate_pot(["qtlib"], Path("qtlib", "locale", "qtlib.pot"), ["tr"])
 
 
 def build_mergepot():
     print("Updating .po files using .pot files")
     loc.merge_pots_into_pos("locale")
-    loc.merge_pots_into_pos(op.join("qtlib", "locale"))
-    # loc.merge_pots_into_pos(op.join("cocoalib", "locale"))
+    loc.merge_pots_into_pos(Path("qtlib", "locale"))
+    # loc.merge_pots_into_pos(Path("cocoalib", "locale"))
 
 
 def build_normpo():
     loc.normalize_all_pos("locale")
-    loc.normalize_all_pos(op.join("qtlib", "locale"))
-    # loc.normalize_all_pos(op.join("cocoalib", "locale"))
-
-
-def build_importpo():
-    basePath = Path.cwd()
-    # expect a folder named transifex with all the .po files from the exports
-    translationsPath = basePath.joinpath("transifex")
-    # locations where the translation files go
-    qtlibPath = basePath.joinpath("qtlib", "locale")
-    localePath = basePath.joinpath("locale")
-    for translation in translationsPath.iterdir():
-        # transifex files are named resource_lang.po so split on first '_'
-        parts = translation.stem.split("_", 1)
-        resource = parts[0]
-        language = parts[1]
-        # make sure qtlib resources go to dedicated folder
-        if resource == "qtlib":
-            outputPath = qtlibPath
-        else:
-            outputPath = localePath
-        outputFolder = outputPath.joinpath(language, "LC_MESSAGES")
-        # create the language folder if it is new
-        if not outputFolder.exists():
-            outputFolder.mkdir(parents=True)
-        # copy the po file over
-        shutil.copy(translation, outputFolder.joinpath(resource + ".po"))
-    # normalize files after complete
-    build_normpo()
+    loc.normalize_all_pos(Path("qtlib", "locale"))
+    # loc.normalize_all_pos(Path("cocoalib", "locale"))
 
 
 def build_pe_modules():
     print("Building PE Modules")
-    exts = [
-        Extension(
-            "_block",
-            [
-                op.join("core", "pe", "modules", "block.c"),
-                op.join("core", "pe", "modules", "common.c"),
-            ],
-        ),
-        Extension(
-            "_cache",
-            [
-                op.join("core", "pe", "modules", "cache.c"),
-                op.join("core", "pe", "modules", "common.c"),
-            ],
-        ),
-    ]
-    exts.append(Extension("_block_qt", [op.join("qt", "pe", "modules", "block.c")]))
-    setup(
-        script_args=["build_ext", "--inplace"],
-        ext_modules=exts,
-    )
-    move_all("_block_qt*", op.join("qt", "pe"))
-    move_all("_block*", op.join("core", "pe"))
-    move_all("_cache*", op.join("core", "pe"))
+    # Leverage setup.py to build modules
+    sandbox.run_setup("setup.py", ["build_ext", "--inplace"])
 
 
 def build_normal():
@@ -199,20 +135,19 @@ def build_normal():
     print("Building localizations")
     build_localizations()
     print("Building Qt stuff")
-    print_and_do(
-        "pyrcc5 {0} > {1}".format(op.join("qt", "dg.qrc"), op.join("qt", "dg_rc.py"))
-    )
-    fix_qt_resource_file(op.join("qt", "dg_rc.py"))
+    print_and_do("pyrcc5 {0} > {1}".format(Path("qt", "dg.qrc"), Path("qt", "dg_rc.py")))
+    fix_qt_resource_file(Path("qt", "dg_rc.py"))
     build_help()
 
 
 def main():
+    if sys.version_info < (3, 6):
+        sys.exit("Python < 3.6 is unsupported.")
     options = parse_args()
-    if options.clean:
-        if op.exists("build"):
-            shutil.rmtree("build")
-    if not op.exists("build"):
-        os.mkdir("build")
+    if options.clean and Path("build").exists():
+        shutil.rmtree("build")
+    if not Path("build").exists():
+        Path("build").mkdir()
     if options.doc:
         build_help()
     elif options.loc:
@@ -225,8 +160,6 @@ def main():
         build_normpo()
     elif options.modules:
         build_pe_modules()
-    elif options.importpo:
-        build_importpo()
     else:
         build_normal()
 
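Note: build_pe_modules() no longer constructs Extension objects inline or shuffles the built artifacts around with move_all(); it simply runs setup.py in-process through setuptools' sandbox. setup.py itself is not part of this diff, so the fragment below is only a sketch of what it presumably contains, reconstructed from the Extension definitions removed above. The dotted module names are an assumption that would explain why the move_all() calls could be dropped: with build_ext --inplace, setuptools writes each extension into the package directory named by its dotted path.

    # Hypothetical setup.py fragment; the real file may differ.
    from setuptools import setup, Extension

    setup(
        ext_modules=[
            Extension("core.pe._block", ["core/pe/modules/block.c", "core/pe/modules/common.c"]),
            Extension("core.pe._cache", ["core/pe/modules/cache.c", "core/pe/modules/common.c"]),
            Extension("qt.pe._block_qt", ["qt/pe/modules/block.c"]),
        ],
    )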
core/app.py (231 changes)
@@ -48,31 +48,31 @@ MSG_MANY_FILES_TO_OPEN = tr(
 
 
 class DestType:
-    Direct = 0
-    Relative = 1
-    Absolute = 2
+    DIRECT = 0
+    RELATIVE = 1
+    ABSOLUTE = 2
 
 
 class JobType:
-    Scan = "job_scan"
-    Load = "job_load"
-    Move = "job_move"
-    Copy = "job_copy"
-    Delete = "job_delete"
+    SCAN = "job_scan"
+    LOAD = "job_load"
+    MOVE = "job_move"
+    COPY = "job_copy"
+    DELETE = "job_delete"
 
 
 class AppMode:
-    Standard = 0
-    Music = 1
-    Picture = 2
+    STANDARD = 0
+    MUSIC = 1
+    PICTURE = 2
 
 
 JOBID2TITLE = {
-    JobType.Scan: tr("Scanning for duplicates"),
-    JobType.Load: tr("Loading"),
-    JobType.Move: tr("Moving"),
-    JobType.Copy: tr("Copying"),
-    JobType.Delete: tr("Sending to Trash"),
+    JobType.SCAN: tr("Scanning for duplicates"),
+    JobType.LOAD: tr("Loading"),
+    JobType.MOVE: tr("Moving"),
+    JobType.COPY: tr("Copying"),
+    JobType.DELETE: tr("Sending to Trash"),
 }
 
 
@@ -126,18 +126,16 @@ class DupeGuru(Broadcaster):
 
     PICTURE_CACHE_TYPE = "sqlite"  # set to 'shelve' for a ShelveCache
 
-    def __init__(self, view):
+    def __init__(self, view, portable=False):
         if view.get_default(DEBUG_MODE_PREFERENCE):
             logging.getLogger().setLevel(logging.DEBUG)
             logging.debug("Debug mode enabled")
         Broadcaster.__init__(self)
         self.view = view
-        self.appdata = desktop.special_folder_path(
-            desktop.SpecialFolder.AppData, appname=self.NAME
-        )
+        self.appdata = desktop.special_folder_path(desktop.SpecialFolder.APPDATA, appname=self.NAME, portable=portable)
         if not op.exists(self.appdata):
             os.makedirs(self.appdata)
-        self.app_mode = AppMode.Standard
+        self.app_mode = AppMode.STANDARD
         self.discarded_file_count = 0
         self.exclude_list = ExcludeList()
         self.directories = directories.Directories(self.exclude_list)
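Note: the constructor grows a portable keyword that is forwarded to desktop.special_folder_path(); presumably a portable install keeps its appdata directory relative to the application instead of under the user profile (the exact behavior lives in hscommon.desktop, which this diff does not show). A hypothetical call site, with view standing in for a platform view object:

    app = DupeGuru(view)                          # per-user appdata, as before
    portable_app = DupeGuru(view, portable=True)  # assumed: appdata stored locally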
@@ -150,7 +148,7 @@ class DupeGuru(Broadcaster):
             "escape_filter_regexp": True,
             "clean_empty_dirs": False,
             "ignore_hardlink_matches": False,
-            "copymove_dest_type": DestType.Relative,
+            "copymove_dest_type": DestType.RELATIVE,
             "picture_cache_type": self.PICTURE_CACHE_TYPE,
         }
         self.selected_dupes = []
@@ -171,9 +169,9 @@ class DupeGuru(Broadcaster):
     def _recreate_result_table(self):
         if self.result_table is not None:
             self.result_table.disconnect()
-        if self.app_mode == AppMode.Picture:
+        if self.app_mode == AppMode.PICTURE:
             self.result_table = pe.result_table.ResultTable(self)
-        elif self.app_mode == AppMode.Music:
+        elif self.app_mode == AppMode.MUSIC:
             self.result_table = me.result_table.ResultTable(self)
         else:
             self.result_table = se.result_table.ResultTable(self)
@@ -182,20 +180,14 @@ class DupeGuru(Broadcaster):
 
     def _get_picture_cache_path(self):
         cache_type = self.options["picture_cache_type"]
-        cache_name = (
-            "cached_pictures.shelve" if cache_type == "shelve" else "cached_pictures.db"
-        )
+        cache_name = "cached_pictures.shelve" if cache_type == "shelve" else "cached_pictures.db"
         return op.join(self.appdata, cache_name)
 
     def _get_dupe_sort_key(self, dupe, get_group, key, delta):
-        if self.app_mode in (AppMode.Music, AppMode.Picture):
-            if key == "folder_path":
-                dupe_folder_path = getattr(
-                    dupe, "display_folder_path", dupe.folder_path
-                )
-                return str(dupe_folder_path).lower()
-        if self.app_mode == AppMode.Picture:
-            if delta and key == "dimensions":
+        if self.app_mode in (AppMode.MUSIC, AppMode.PICTURE) and key == "folder_path":
+            dupe_folder_path = getattr(dupe, "display_folder_path", dupe.folder_path)
+            return str(dupe_folder_path).lower()
+        if self.app_mode == AppMode.PICTURE and delta and key == "dimensions":
             r = cmp_value(dupe, key)
             ref_value = cmp_value(get_group().ref, key)
             return get_delta_dimensions(r, ref_value)
@@ -218,11 +210,8 @@ class DupeGuru(Broadcaster):
         return result
 
     def _get_group_sort_key(self, group, key):
-        if self.app_mode in (AppMode.Music, AppMode.Picture):
-            if key == "folder_path":
-                dupe_folder_path = getattr(
-                    group.ref, "display_folder_path", group.ref.folder_path
-                )
-                return str(dupe_folder_path).lower()
+        if self.app_mode in (AppMode.MUSIC, AppMode.PICTURE) and key == "folder_path":
+            dupe_folder_path = getattr(group.ref, "display_folder_path", group.ref.folder_path)
+            return str(dupe_folder_path).lower()
         if key == "percentage":
             return group.percentage
@@ -235,9 +224,7 @@ class DupeGuru(Broadcaster):
     def _do_delete(self, j, link_deleted, use_hardlinks, direct_deletion):
         def op(dupe):
             j.add_progress()
-            return self._do_delete_dupe(
-                dupe, link_deleted, use_hardlinks, direct_deletion
-            )
+            return self._do_delete_dupe(dupe, link_deleted, use_hardlinks, direct_deletion)
 
         j.start_job(self.results.mark_count)
         self.results.perform_on_marked(op, True)
@@ -277,11 +264,7 @@ class DupeGuru(Broadcaster):
         return None
 
     def _get_export_data(self):
-        columns = [
-            col
-            for col in self.result_table.columns.ordered_columns
-            if col.visible and col.name != "marked"
-        ]
+        columns = [col for col in self.result_table._columns.ordered_columns if col.visible and col.name != "marked"]
         colnames = [col.display for col in columns]
         rows = []
         for group_id, group in enumerate(self.results.groups):
@@ -293,11 +276,7 @@ class DupeGuru(Broadcaster):
         return colnames, rows
 
     def _results_changed(self):
-        self.selected_dupes = [
-            d
-            for d in self.selected_dupes
-            if self.results.get_group_of_duplicate(d) is not None
-        ]
+        self.selected_dupes = [d for d in self.selected_dupes if self.results.get_group_of_duplicate(d) is not None]
         self.notify("results_changed")
 
     def _start_job(self, jobid, func, args=()):
@@ -312,34 +291,32 @@ class DupeGuru(Broadcaster):
         self.view.show_message(msg)
 
     def _job_completed(self, jobid):
-        if jobid == JobType.Scan:
+        if jobid == JobType.SCAN:
             self._results_changed()
             if not self.results.groups:
                 self.view.show_message(tr("No duplicates found."))
             else:
                 self.view.show_results_window()
-        if jobid in {JobType.Move, JobType.Delete}:
+        if jobid in {JobType.MOVE, JobType.DELETE}:
             self._results_changed()
-        if jobid == JobType.Load:
+        if jobid == JobType.LOAD:
             self._recreate_result_table()
             self._results_changed()
             self.view.show_results_window()
-        if jobid in {JobType.Copy, JobType.Move, JobType.Delete}:
+        if jobid in {JobType.COPY, JobType.MOVE, JobType.DELETE}:
             if self.results.problems:
                 self.problem_dialog.refresh()
                 self.view.show_problem_dialog()
             else:
                 msg = {
-                    JobType.Copy: tr("All marked files were copied successfully."),
-                    JobType.Move: tr("All marked files were moved successfully."),
-                    JobType.Delete: tr(
-                        "All marked files were successfully sent to Trash."
-                    ),
+                    JobType.COPY: tr("All marked files were copied successfully."),
+                    JobType.MOVE: tr("All marked files were moved successfully."),
+                    JobType.DELETE: tr("All marked files were successfully sent to Trash."),
                 }[jobid]
                 self.view.show_message(msg)
 
     def _job_error(self, jobid, err):
-        if jobid == JobType.Load:
+        if jobid == JobType.LOAD:
             msg = tr("Could not load file: {}").format(err)
             self.view.show_message(msg)
             return False
@@ -369,17 +346,17 @@ class DupeGuru(Broadcaster):
 
     # --- Protected
     def _get_fileclasses(self):
-        if self.app_mode == AppMode.Picture:
+        if self.app_mode == AppMode.PICTURE:
             return [pe.photo.PLAT_SPECIFIC_PHOTO_CLASS]
-        elif self.app_mode == AppMode.Music:
+        elif self.app_mode == AppMode.MUSIC:
             return [me.fs.MusicFile]
         else:
             return [se.fs.File]
 
     def _prioritization_categories(self):
-        if self.app_mode == AppMode.Picture:
+        if self.app_mode == AppMode.PICTURE:
             return pe.prioritize.all_categories()
-        elif self.app_mode == AppMode.Music:
+        elif self.app_mode == AppMode.MUSIC:
             return me.prioritize.all_categories()
         else:
             return prioritize.all_categories()
@@ -401,35 +378,32 @@ class DupeGuru(Broadcaster):
             self.view.show_message(tr("'{}' does not exist.").format(d))
 
     def add_selected_to_ignore_list(self):
-        """Adds :attr:`selected_dupes` to :attr:`ignore_list`.
-        """
+        """Adds :attr:`selected_dupes` to :attr:`ignore_list`."""
         dupes = self.without_ref(self.selected_dupes)
         if not dupes:
             self.view.show_message(MSG_NO_SELECTED_DUPES)
             return
-        msg = tr(
-            "All selected %d matches are going to be ignored in all subsequent scans. Continue?"
-        )
+        msg = tr("All selected %d matches are going to be ignored in all subsequent scans. Continue?")
         if not self.view.ask_yes_no(msg % len(dupes)):
             return
         for dupe in dupes:
             g = self.results.get_group_of_duplicate(dupe)
             for other in g:
                 if other is not dupe:
-                    self.ignore_list.Ignore(str(other.path), str(dupe.path))
+                    self.ignore_list.ignore(str(other.path), str(dupe.path))
         self.remove_duplicates(dupes)
         self.ignore_list_dialog.refresh()
 
-    def apply_filter(self, filter):
+    def apply_filter(self, result_filter):
         """Apply a filter ``filter`` to the results so that it shows only dupe groups that match it.
 
         :param str filter: filter to apply
         """
         self.results.apply_filter(None)
         if self.options["escape_filter_regexp"]:
-            filter = escape(filter, set("()[]\\.|+?^"))
-            filter = escape(filter, "*", ".")
-        self.results.apply_filter(filter)
+            result_filter = escape(result_filter, set("()[]\\.|+?^"))
+            result_filter = escape(result_filter, "*", ".")
+        self.results.apply_filter(result_filter)
         self._results_changed()
 
     def clean_empty_dirs(self, path):
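Note: renaming apply_filter's parameter from filter to result_filter stops it from shadowing the filter() builtin; the escaping logic itself is untouched. A self-contained sketch of how hscommon.util.escape is assumed to behave here (each character found in to_escape gets prefixed with escape_with), turning a glob-style user filter into a safe regular expression:

    def escape(s, to_escape, escape_with="\\"):
        # assumed behavior of hscommon.util.escape; the real helper is not in this diff
        return "".join(escape_with + c if c in to_escape else c for c in s)

    print(escape("photo (1)", set("()[]\\.|+?^")))  # photo \(1\)  - regex metachars neutralized
    print(escape("photo*", "*", "."))               # photo.*     - glob star becomes regex .*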
@@ -447,10 +421,10 @@ class DupeGuru(Broadcaster):
             source_path = dupe.path
             location_path = first(p for p in self.directories if dupe.path in p)
             dest_path = Path(destination)
-            if dest_type in {DestType.Relative, DestType.Absolute}:
+            if dest_type in {DestType.RELATIVE, DestType.ABSOLUTE}:
                 # no filename, no windows drive letter
                 source_base = source_path.remove_drive_letter().parent()
-                if dest_type == DestType.Relative:
+                if dest_type == DestType.RELATIVE:
                     source_base = source_base[location_path:]
                 dest_path = dest_path[source_base]
             if not dest_path.exists():
@@ -483,16 +457,17 @@ class DupeGuru(Broadcaster):
             self.view.show_message(MSG_NO_MARKED_DUPES)
             return
         destination = self.view.select_dest_folder(
-            tr("Select a directory to copy marked files to") if copy
-            else tr("Select a directory to move marked files to"))
+            tr("Select a directory to copy marked files to")
+            if copy
+            else tr("Select a directory to move marked files to")
+        )
         if destination:
             desttype = self.options["copymove_dest_type"]
-            jobid = JobType.Copy if copy else JobType.Move
+            jobid = JobType.COPY if copy else JobType.MOVE
             self._start_job(jobid, do)
 
     def delete_marked(self):
-        """Start an async job to send marked duplicates to the trash.
-        """
+        """Start an async job to send marked duplicates to the trash."""
         if not self.results.mark_count:
             self.view.show_message(MSG_NO_MARKED_DUPES)
             return
@@ -504,7 +479,7 @@ class DupeGuru(Broadcaster):
             self.deletion_options.direct,
         ]
         logging.debug("Starting deletion job with args %r", args)
-        self._start_job(JobType.Delete, self._do_delete, args=args)
+        self._start_job(JobType.DELETE, self._do_delete, args=args)
 
     def export_to_xhtml(self):
         """Export current results to XHTML.
@@ -523,9 +498,7 @@ class DupeGuru(Broadcaster):
         The columns and their order in the resulting CSV file is determined in the same way as in
         :meth:`export_to_xhtml`.
         """
-        dest_file = self.view.select_dest_file(
-            tr("Select a destination for your exported CSV"), "csv"
-        )
+        dest_file = self.view.select_dest_file(tr("Select a destination for your exported CSV"), "csv")
         if dest_file:
             colnames, rows = self._get_export_data()
             try:
@@ -542,9 +515,7 @@ class DupeGuru(Broadcaster):
             try:
                 return dupe.get_display_info(group, delta)
             except Exception as e:
-                logging.warning(
-                    "Exception (type: %s) on GetDisplayInfo for %s: %s",
-                    type(e), str(dupe.path), str(e))
+                logging.warning("Exception (type: %s) on GetDisplayInfo for %s: %s", type(e), str(dupe.path), str(e))
                 return empty_data()
 
     def invoke_custom_command(self):
@@ -556,9 +527,7 @@ class DupeGuru(Broadcaster):
         """
         cmd = self.view.get_default("CustomCommand")
         if not cmd:
-            msg = tr(
-                "You have no custom command set up. Set it up in your preferences."
-            )
+            msg = tr("You have no custom command set up. Set it up in your preferences.")
             self.view.show_message(msg)
             return
         if not self.selected_dupes:
@@ -610,7 +579,7 @@ class DupeGuru(Broadcaster):
         def do(j):
             self.results.load_from_xml(filename, self._get_file, j)
 
-        self._start_job(JobType.Load, do)
+        self._start_job(JobType.LOAD, do)
 
     def make_selected_reference(self):
         """Promote :attr:`selected_dupes` to reference position within their respective groups.
@@ -623,8 +592,7 @@ class DupeGuru(Broadcaster):
         changed_groups = set()
         for dupe in dupes:
             g = self.results.get_group_of_duplicate(dupe)
-            if g not in changed_groups:
-                if self.results.make_ref(dupe):
-                    changed_groups.add(g)
+            if g not in changed_groups and self.results.make_ref(dupe):
+                changed_groups.add(g)
         # It's not always obvious to users what this action does, so to make it a bit clearer,
         # we change our selection to the ref of all changed groups. However, we also want to keep
@@ -634,9 +602,7 @@ class DupeGuru(Broadcaster):
         if not self.result_table.power_marker:
             if changed_groups:
                 self.selected_dupes = [
-                    d
-                    for d in self.selected_dupes
-                    if self.results.get_group_of_duplicate(d).ref is d
+                    d for d in self.selected_dupes if self.results.get_group_of_duplicate(d).ref is d
                 ]
             self.notify("results_changed")
         else:
@@ -648,20 +614,17 @@ class DupeGuru(Broadcaster):
             self.notify("results_changed_but_keep_selection")
 
     def mark_all(self):
-        """Set all dupes in the results as marked.
-        """
+        """Set all dupes in the results as marked."""
         self.results.mark_all()
         self.notify("marking_changed")
 
     def mark_none(self):
-        """Set all dupes in the results as unmarked.
-        """
+        """Set all dupes in the results as unmarked."""
         self.results.mark_none()
         self.notify("marking_changed")
 
     def mark_invert(self):
-        """Invert the marked state of all dupes in the results.
-        """
+        """Invert the marked state of all dupes in the results."""
         self.results.mark_invert()
         self.notify("marking_changed")
 
@@ -679,18 +642,15 @@ class DupeGuru(Broadcaster):
         self.notify("marking_changed")
 
     def open_selected(self):
-        """Open :attr:`selected_dupes` with their associated application.
-        """
-        if len(self.selected_dupes) > 10:
-            if not self.view.ask_yes_no(MSG_MANY_FILES_TO_OPEN):
-                return
+        """Open :attr:`selected_dupes` with their associated application."""
+        if len(self.selected_dupes) > 10 and not self.view.ask_yes_no(MSG_MANY_FILES_TO_OPEN):
+            return
         for dupe in self.selected_dupes:
             desktop.open_path(dupe.path)
 
     def purge_ignore_list(self):
-        """Remove files that don't exist from :attr:`ignore_list`.
-        """
-        self.ignore_list.Filter(lambda f, s: op.exists(f) and op.exists(s))
+        """Remove files that don't exist from :attr:`ignore_list`."""
+        self.ignore_list.filter(lambda f, s: op.exists(f) and op.exists(s))
         self.ignore_list_dialog.refresh()
 
     def remove_directories(self, indexes):
@@ -719,8 +679,7 @@ class DupeGuru(Broadcaster):
         self.notify("results_changed_but_keep_selection")
 
     def remove_marked(self):
-        """Removed marked duplicates from the results (without touching the files themselves).
-        """
+        """Removed marked duplicates from the results (without touching the files themselves)."""
         if not self.results.mark_count:
             self.view.show_message(MSG_NO_MARKED_DUPES)
             return
@@ -731,8 +690,7 @@ class DupeGuru(Broadcaster):
         self._results_changed()
 
     def remove_selected(self):
-        """Removed :attr:`selected_dupes` from the results (without touching the files themselves).
-        """
+        """Removed :attr:`selected_dupes` from the results (without touching the files themselves)."""
         dupes = self.without_ref(self.selected_dupes)
         if not dupes:
             self.view.show_message(MSG_NO_SELECTED_DUPES)
@@ -770,10 +728,10 @@ class DupeGuru(Broadcaster):
         for group in self.results.groups:
             if group.prioritize(key_func=sort_key):
                 count += 1
+        if count:
+            self.results.refresh_required = True
         self._results_changed()
-        msg = tr("{} duplicate groups were changed by the re-prioritization.").format(
-            count
-        )
+        msg = tr("{} duplicate groups were changed by the re-prioritization.").format(count)
         self.view.show_message(msg)
 
     def reveal_selected(self):
@@ -817,15 +775,13 @@ class DupeGuru(Broadcaster):
         """
         scanner = self.SCANNER_CLASS()
         if not self.directories.has_any_file():
-            self.view.show_message(
-                tr("The selected directories contain no scannable file.")
-            )
+            self.view.show_message(tr("The selected directories contain no scannable file."))
            return
         # Send relevant options down to the scanner instance
         for k, v in self.options.items():
             if hasattr(scanner, k):
                 setattr(scanner, k, v)
-        if self.app_mode == AppMode.Picture:
+        if self.app_mode == AppMode.PICTURE:
             scanner.cache_path = self._get_picture_cache_path()
         self.results.groups = []
         self._recreate_result_table()
@@ -833,21 +789,17 @@ class DupeGuru(Broadcaster):
 
         def do(j):
             j.set_progress(0, tr("Collecting files to scan"))
-            if scanner.scan_type == ScanType.Folders:
-                files = list(
-                    self.directories.get_folders(folderclass=se.fs.Folder, j=j)
-                )
+            if scanner.scan_type == ScanType.FOLDERS:
+                files = list(self.directories.get_folders(folderclass=se.fs.Folder, j=j))
             else:
-                files = list(
-                    self.directories.get_files(fileclasses=self.fileclasses, j=j)
-                )
+                files = list(self.directories.get_files(fileclasses=self.fileclasses, j=j))
             if self.options["ignore_hardlink_matches"]:
                 files = self._remove_hardlink_dupes(files)
             logging.info("Scanning %d files" % len(files))
             self.results.groups = scanner.get_dupe_groups(files, self.ignore_list, j)
             self.discarded_file_count = scanner.discarded_file_count
 
-        self._start_job(JobType.Scan, do)
+        self._start_job(JobType.SCAN, do)
 
     def toggle_selected_mark_state(self):
         selected = self.without_ref(self.selected_dupes)
@@ -862,13 +814,8 @@ class DupeGuru(Broadcaster):
         self.notify("marking_changed")
 
     def without_ref(self, dupes):
-        """Returns ``dupes`` with all reference elements removed.
-        """
-        return [
-            dupe
-            for dupe in dupes
-            if self.results.get_group_of_duplicate(dupe).ref is not dupe
-        ]
+        """Returns ``dupes`` with all reference elements removed."""
+        return [dupe for dupe in dupes if self.results.get_group_of_duplicate(dupe).ref is not dupe]
 
     def get_default(self, key, fallback_value=None):
         result = nonone(self.view.get_default(key), fallback_value)
@@ -897,18 +844,18 @@ class DupeGuru(Broadcaster):
 
     @property
     def SCANNER_CLASS(self):
-        if self.app_mode == AppMode.Picture:
+        if self.app_mode == AppMode.PICTURE:
             return pe.scanner.ScannerPE
-        elif self.app_mode == AppMode.Music:
+        elif self.app_mode == AppMode.MUSIC:
             return me.scanner.ScannerME
         else:
             return se.scanner.ScannerSE
 
     @property
     def METADATA_TO_READ(self):
-        if self.app_mode == AppMode.Picture:
+        if self.app_mode == AppMode.PICTURE:
             return ["size", "mtime", "dimensions", "exif_timestamp"]
-        elif self.app_mode == AppMode.Music:
+        elif self.app_mode == AppMode.MUSIC:
             return [
                 "size",
                 "mtime",
core/directories.py
@@ -30,9 +30,9 @@ class DirectoryState:
     * DirectoryState.Excluded: Don't scan this folder
     """
 
-    Normal = 0
-    Reference = 1
-    Excluded = 2
+    NORMAL = 0
+    REFERENCE = 1
+    EXCLUDED = 2
 
 
 class AlreadyThereError(Exception):
@@ -82,60 +82,49 @@ class Directories:
         # We iterate even if we only have one item here
         for denied_path_re in self._exclude_list.compiled:
             if denied_path_re.match(str(path.name)):
-                return DirectoryState.Excluded
+                return DirectoryState.EXCLUDED
         # return # We still use the old logic to force state on hidden dirs
         # Override this in subclasses to specify the state of some special folders.
         if path.name.startswith("."):
-            return DirectoryState.Excluded
+            return DirectoryState.EXCLUDED
 
     def _get_files(self, from_path, fileclasses, j):
         for root, dirs, files in os.walk(str(from_path)):
             j.check_if_cancelled()
-            rootPath = Path(root)
-            state = self.get_state(rootPath)
-            if state == DirectoryState.Excluded:
+            root_path = Path(root)
+            state = self.get_state(root_path)
+            if state == DirectoryState.EXCLUDED and not any(p[: len(root_path)] == root_path for p in self.states):
                 # Recursively get files from folders with lots of subfolder is expensive. However, there
                 # might be a subfolder in this path that is not excluded. What we want to do is to skim
                 # through self.states and see if we must continue, or we can stop right here to save time
-                if not any(p[: len(rootPath)] == rootPath for p in self.states):
-                    del dirs[:]
+                del dirs[:]
             try:
-                if state != DirectoryState.Excluded:
+                if state != DirectoryState.EXCLUDED:
                     # Old logic
                     if self._exclude_list is None or not self._exclude_list.mark_count:
-                        found_files = [fs.get_file(rootPath + f, fileclasses=fileclasses) for f in files]
+                        found_files = [fs.get_file(root_path + f, fileclasses=fileclasses) for f in files]
                     else:
                         found_files = []
                         # print(f"len of files: {len(files)} {files}")
                         for f in files:
-                            found = False
-                            for expr in self._exclude_list.compiled_files:
-                                if expr.match(f):
-                                    found = True
-                                    break
-                            if not found:
-                                for expr in self._exclude_list.compiled_paths:
-                                    if expr.match(root + os.sep + f):
-                                        found = True
-                                        break
-                            if not found:
-                                found_files.append(fs.get_file(rootPath + f, fileclasses=fileclasses))
+                            if not self._exclude_list.is_excluded(root, f):
+                                found_files.append(fs.get_file(root_path + f, fileclasses=fileclasses))
                     found_files = [f for f in found_files if f is not None]
                     # In some cases, directories can be considered as files by dupeGuru, which is
                     # why we have this line below. In fact, there only one case: Bundle files under
                     # OS X... In other situations, this forloop will do nothing.
                     for d in dirs[:]:
-                        f = fs.get_file(rootPath + d, fileclasses=fileclasses)
+                        f = fs.get_file(root_path + d, fileclasses=fileclasses)
                         if f is not None:
                             found_files.append(f)
                             dirs.remove(d)
                     logging.debug(
                         "Collected %d files in folder %s",
                         len(found_files),
-                        str(rootPath),
+                        str(root_path),
                     )
                     for file in found_files:
-                        file.is_ref = state == DirectoryState.Reference
+                        file.is_ref = state == DirectoryState.REFERENCE
                         yield file
             except (EnvironmentError, fs.InvalidPath):
                 pass
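Note: the two inline pattern passes (filename regexes, then full-path regexes) collapse into a single ExcludeList.is_excluded(root, f) call. That method is defined elsewhere and is not part of this diff; a sketch of what it presumably does, reconstructed directly from the removed lines:

    import os

    def is_excluded(self, dir_path, filename):
        # assumed equivalent of the inline logic removed above
        for expr in self.compiled_files:
            if expr.match(filename):  # patterns matched against the bare filename
                return True
        for expr in self.compiled_paths:
            if expr.match(dir_path + os.sep + filename):  # matched against the full path
                return True
        return False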
@@ -147,8 +136,8 @@ class Directories:
             for folder in self._get_folders(subfolder, j):
                 yield folder
             state = self.get_state(from_folder.path)
-            if state != DirectoryState.Excluded:
-                from_folder.is_ref = state == DirectoryState.Reference
+            if state != DirectoryState.EXCLUDED:
+                from_folder.is_ref = state == DirectoryState.REFERENCE
                 logging.debug("Yielding Folder %r state: %d", from_folder, state)
                 yield from_folder
         except (EnvironmentError, fs.InvalidPath):
@@ -217,9 +206,9 @@ class Directories:
         # direct match? easy result.
         if path in self.states:
             return self.states[path]
-        state = self._default_state_for_path(path) or DirectoryState.Normal
+        state = self._default_state_for_path(path) or DirectoryState.NORMAL
         # Save non-default states in cache, necessary for _get_files()
-        if state != DirectoryState.Normal:
+        if state != DirectoryState.NORMAL:
             self.states[path] = state
         return state
 
@@ -17,7 +17,11 @@ from hscommon.util import flatten, multi_replace
 from hscommon.trans import tr
 from hscommon.jobprogress import job

-(WEIGHT_WORDS, MATCH_SIMILAR_WORDS, NO_FIELD_ORDER,) = range(3)
+(
+    WEIGHT_WORDS,
+    MATCH_SIMILAR_WORDS,
+    NO_FIELD_ORDER,
+) = range(3)

 JOB_REFRESH_RATE = 100

@@ -26,8 +30,17 @@ def getwords(s):
     # We decompose the string so that ascii letters with accents can be part of the word.
     s = normalize("NFD", s)
     s = multi_replace(s, "-_&+():;\\[]{}.,<>/?~!@#$*", " ").lower()
+    # logging.debug(f"DEBUG chars for: {s}\n"
+    #               f"{[c for c in s if ord(c) != 32]}\n"
+    #               f"{[ord(c) for c in s if ord(c) != 32]}")
+    # HACK We shouldn't ignore non-ascii characters altogether. Any Unicode char
+    # above common european characters that cannot be "sanitized" (ie. stripped
+    # of their accents, etc.) are preserved as is. The arbitrary limit is
+    # obtained from this one: ord("\u037e") GREEK QUESTION MARK
     s = "".join(
-        c for c in s if c in string.ascii_letters + string.digits + string.whitespace
+        c
+        for c in s
+        if (ord(c) <= 894 and c in string.ascii_letters + string.digits + string.whitespace) or ord(c) > 894
     )
     return [_f for _f in s.split(" ") if _f]  # remove empty elements
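The new character filter in getwords() reads better in isolation. A self-contained sketch of the same rule (ord("\u037e") is 894, the GREEK QUESTION MARK mentioned in the comment):

```python
# Sketch of the getwords() filter above: characters at or below code point 894
# are kept only when they are ASCII letters/digits/whitespace (accents were
# already split off by the NFD pass); anything above 894 is preserved as-is.
import string
from unicodedata import normalize

ALLOWED = string.ascii_letters + string.digits + string.whitespace

def keep(c):
    return (ord(c) <= 894 and c in ALLOWED) or ord(c) > 894

s = normalize("NFD", "Café 新しいファイル")
print("".join(c for c in s if keep(c)))  # -> "Cafe 新しいファイル"
```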
@@ -93,20 +106,18 @@ def compare_fields(first, second, flags=()):
         # We don't want to remove field directly in the list. We must work on a copy.
         second = second[:]
         for field1 in first:
-            max = 0
+            max_score = 0
             matched_field = None
             for field2 in second:
                 r = compare(field1, field2, flags)
-                if r > max:
-                    max = r
+                if r > max_score:
+                    max_score = r
                     matched_field = field2
-            results.append(max)
+            results.append(max_score)
             if matched_field:
                 second.remove(matched_field)
     else:
-        results = [
-            compare(field1, field2, flags) for field1, field2 in zip(first, second)
-        ]
+        results = [compare(field1, field2, flags) for field1, field2 in zip(first, second)]
     return min(results) if results else 0

@@ -119,9 +130,7 @@ def build_word_dict(objects, j=job.nulljob):
     The result will be a dict with words as keys, lists of objects as values.
     """
     result = defaultdict(set)
-    for object in j.iter_with_progress(
-        objects, "Prepared %d/%d files", JOB_REFRESH_RATE
-    ):
+    for object in j.iter_with_progress(objects, "Prepared %d/%d files", JOB_REFRESH_RATE):
         for word in unpack_fields(object.words):
             result[word].add(object)
     return result
@@ -156,9 +165,7 @@ def reduce_common_words(word_dict, threshold):
     The exception to this removal are the objects where all the words of the object are common.
     Because if we remove them, we will miss some duplicates!
    """
-    uncommon_words = set(
-        word for word, objects in word_dict.items() if len(objects) < threshold
-    )
+    uncommon_words = set(word for word, objects in word_dict.items() if len(objects) < threshold)
     for word, objects in list(word_dict.items()):
         if len(objects) < threshold:
             continue
@@ -264,17 +271,16 @@ def getmatches(
             # This is the place where the memory usage is at its peak during the scan.
             # Just continue the process with an incomplete list of matches.
             del compared  # This should give us enough room to call logging.
-            logging.warning(
-                "Memory Overflow. Matches: %d. Word dict: %d"
-                % (len(result), len(word_dict))
-            )
+            logging.warning("Memory Overflow. Matches: %d. Word dict: %d" % (len(result), len(word_dict)))
             return result
     return result


-def getmatches_by_contents(files, j=job.nulljob):
+def getmatches_by_contents(files, bigsize=0, j=job.nulljob):
     """Returns a list of :class:`Match` within ``files`` if their contents is the same.

+    :param bigsize: The size in bytes over which we consider files big enough to
+        justify taking samples of md5. If 0, compute md5 as usual.
     :param j: A :ref:`job progress instance <jobs>`.
     """
     size2files = defaultdict(set)
@@ -291,6 +297,10 @@ def getmatches_by_contents(files, j=job.nulljob):
             if first.is_ref and second.is_ref:
                 continue  # Don't spend time comparing two ref pics together.
             if first.md5partial == second.md5partial:
+                if bigsize > 0 and first.size > bigsize:
+                    if first.md5samples == second.md5samples:
+                        result.append(Match(first, second, 100))
+                else:
                     if first.md5 == second.md5:
                         result.append(Match(first, second, 100))
             j.add_progress(desc=tr("%d matches found") % len(result))
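The effect of the new bigsize parameter is easiest to see as a standalone decision function. A minimal sketch with a hypothetical FileInfo stand-in for dupeGuru's file wrappers:

```python
# Sketch of the bigsize branch above; FileInfo is a made-up stand-in.
from dataclasses import dataclass

@dataclass
class FileInfo:
    size: int
    md5partial: bytes
    md5: bytes
    md5samples: bytes

def same_contents(first, second, bigsize=0):
    if first.md5partial != second.md5partial:
        return False  # cheap early rejection on the partial hash
    if bigsize > 0 and first.size > bigsize:
        # big file: compare a few sampled chunks instead of hashing everything
        return first.md5samples == second.md5samples
    return first.md5 == second.md5  # small file: full hash, as before
```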
@@ -391,18 +401,13 @@ class Group:

         You can call this after the duplicate scanning process to free a bit of memory.
         """
-        discarded = set(
-            m
-            for m in self.matches
-            if not all(obj in self.unordered for obj in [m.first, m.second])
-        )
+        discarded = set(m for m in self.matches if not all(obj in self.unordered for obj in [m.first, m.second]))
         self.matches -= discarded
         self.candidates = defaultdict(set)
         return discarded

     def get_match_of(self, item):
-        """Returns the match pair between ``item`` and :attr:`ref`.
-        """
+        """Returns the match pair between ``item`` and :attr:`ref`."""
         if item is self.ref:
             return
         for m in self._get_matches_for_ref():
@@ -418,8 +423,7 @@ class Group:
         """
         # tie_breaker(ref, dupe) --> True if dupe should be ref
         # Returns True if anything changed during prioritization.
-        master_key_func = lambda x: (-x.is_ref, key_func(x))
-        new_order = sorted(self.ordered, key=master_key_func)
+        new_order = sorted(self.ordered, key=lambda x: (-x.is_ref, key_func(x)))
         changed = new_order != self.ordered
         self.ordered = new_order
         if tie_breaker is None:
@@ -442,9 +446,7 @@ class Group:
         self.unordered.remove(item)
         self._percentage = None
         self._matches_for_ref = None
-        if (len(self) > 1) and any(
-            not getattr(item, "is_ref", False) for item in self
-        ):
+        if (len(self) > 1) and any(not getattr(item, "is_ref", False) for item in self):
             if discard_matches:
                 self.matches = set(m for m in self.matches if item not in m)
             else:
@@ -453,8 +455,7 @@ class Group:
             pass

     def switch_ref(self, with_dupe):
-        """Make the :attr:`ref` dupe of the group switch position with ``with_dupe``.
-        """
+        """Make the :attr:`ref` dupe of the group switch position with ``with_dupe``."""
         if self.ref.is_ref:
             return False
         try:
@@ -473,9 +474,7 @@ class Group:
         if self._percentage is None:
             if self.dupes:
                 matches = self._get_matches_for_ref()
-                self._percentage = sum(match.percentage for match in matches) // len(
-                    matches
-                )
+                self._percentage = sum(match.percentage for match in matches) // len(matches)
             else:
                 self._percentage = 0
         return self._percentage
@@ -530,12 +529,8 @@ def get_groups(matches):
     orphan_matches = []
     for group in groups:
         orphan_matches += {
-            m
-            for m in group.discard_matches()
-            if not any(obj in matched_files for obj in [m.first, m.second])
+            m for m in group.discard_matches() if not any(obj in matched_files for obj in [m.first, m.second])
         }
     if groups and orphan_matches:
-        groups += get_groups(
-            orphan_matches
-        )  # no job, as it isn't supposed to take a long time
+        groups += get_groups(orphan_matches)  # no job, as it isn't supposed to take a long time
     return groups

117 core/exclude.py
@@ -4,6 +4,7 @@

 from .markable import Markable
 from xml.etree import ElementTree as ET

 # TODO: perhaps use regex module for better Unicode support? https://pypi.org/project/regex/
 # also https://pypi.org/project/re2/
 # TODO update the Result list with newly added regexes if possible
@@ -15,13 +16,14 @@ from hscommon.util import FileOrPath
 from hscommon.plat import ISWINDOWS
 import time

-default_regexes = [r"^thumbs\.db$",  # Obsolete after WindowsXP
+default_regexes = [
+    r"^thumbs\.db$",  # Obsolete after WindowsXP
     r"^desktop\.ini$",  # Windows metadata
     r"^\.DS_Store$",  # MacOS metadata
     r"^\.Trash\-.*",  # Linux trash directories
     r"^\$Recycle\.Bin$",  # Windows
     r"^\..*",  # Hidden files on Unix-like
 ]
 # These are too broad
 forbidden_regexes = [r".*", r"\/.*", r".*\/.*", r".*\\\\.*", r".*\..*"]

@@ -34,6 +36,7 @@ def timer(func):
         end = time.perf_counter_ns()
         print(f"DEBUG: func {func.__name__!r} took {end - start} ns.")
         return value

     return wrapper_timer

@@ -45,11 +48,13 @@ def memoize(func):
         if args not in func.cache:
             func.cache[args] = func(*args)
         return func.cache[args]

     return _memoize


 class AlreadyThereException(Exception):
     """Expression already in the list"""

     def __init__(self, arg="Expression is already in excluded list."):
         super().__init__(arg)

@@ -81,7 +86,7 @@ class ExcludeList(Markable):
         yield self.is_marked(regex), regex

     def __contains__(self, item):
-        return self.isExcluded(item)
+        return self.has_entry(item)

     def __len__(self):
         """Returns the total number of regexes regardless of mark status."""
@@ -145,10 +150,7 @@ class ExcludeList(Markable):
     # @timer
     @memoize
     def _do_compile(self, expr):
-        try:
-            return re.compile(expr)
-        except Exception as e:
-            raise(e)
+        return re.compile(expr)

     # @timer
     # @memoize  # probably not worth memoizing this one if we memoize the above
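Dropping the try/except leaves the memoized compile as the whole story: each distinct expression is compiled once, and any re.error now propagates to the caller. A quick standalone illustration of the same caching effect, using the stdlib equivalent of the custom @memoize:

```python
# Illustration only: functools.lru_cache plays the role of @memoize here.
import functools
import re

@functools.lru_cache(maxsize=None)
def do_compile(expr):
    return re.compile(expr)

p1 = do_compile(r"^thumbs\.db$")
p2 = do_compile(r"^thumbs\.db$")
assert p1 is p2  # second call is a cache hit, nothing is recompiled
```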
@@ -169,11 +171,11 @@ class ExcludeList(Markable):

     def build_compiled_caches(self, union=False):
         if not union:
-            self._cached_compiled_files =\
-                [x for x in self._excluded_compiled if not has_sep(x.pattern)]
-            self._cached_compiled_paths =\
-                [x for x in self._excluded_compiled if has_sep(x.pattern)]
+            self._cached_compiled_files = [x for x in self._excluded_compiled if not has_sep(x.pattern)]
+            self._cached_compiled_paths = [x for x in self._excluded_compiled if has_sep(x.pattern)]
+            self._dirty = False
             return

         marked_count = [x for marked, x in self if marked]
         # If there is no item, the compiled Pattern will be '' and match everything!
         if not marked_count:
@@ -183,28 +185,25 @@ class ExcludeList(Markable):
         else:
             # HACK returned as a tuple to get a free iterator and keep interface
             # the same regardless of whether the client asked for union or not
-            self._cached_compiled_union_all =\
-                (re.compile('|'.join(marked_count)),)
+            self._cached_compiled_union_all = (re.compile("|".join(marked_count)),)
         files_marked = [x for x in marked_count if not has_sep(x)]
         if not files_marked:
             self._cached_compiled_union_files = tuple()
         else:
-            self._cached_compiled_union_files =\
-                (re.compile('|'.join(files_marked)),)
+            self._cached_compiled_union_files = (re.compile("|".join(files_marked)),)
         paths_marked = [x for x in marked_count if has_sep(x)]
         if not paths_marked:
             self._cached_compiled_union_paths = tuple()
         else:
-            self._cached_compiled_union_paths =\
-                (re.compile('|'.join(paths_marked)),)
+            self._cached_compiled_union_paths = (re.compile("|".join(paths_marked)),)
+        self._dirty = False

     @property
     def compiled(self):
         """Should be used by other classes to retrieve the up-to-date list of patterns."""
         if self._use_union:
             if self._dirty:
-                self.build_compiled_caches(True)
-                self._dirty = False
+                self.build_compiled_caches(self._use_union)
             return self._cached_compiled_union_all
         return self._excluded_compiled
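The union caches above OR all marked patterns into a single compiled regex, wrapped in a 1-tuple so callers can iterate over it exactly as they iterate over the per-pattern list. A standalone sketch of that trick:

```python
# Sketch of the union-compilation idea; the patterns are sample values.
import re

marked = [r"^thumbs\.db$", r"^desktop\.ini$", r"^\.DS_Store$"]
union = (re.compile("|".join(marked)),)  # 1-tuple: a "free iterator"

for pattern in union:  # same loop shape as iterating individual patterns
    print(bool(pattern.match("desktop.ini")))  # True, with a single match call
```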
@@ -215,29 +214,25 @@ class ExcludeList(Markable):
         The interface should be expected to be a generator, even if it returns only
         one item (one Pattern in the union case)."""
         if self._dirty:
-            self.build_compiled_caches(True if self._use_union else False)
-            self._dirty = False
-        return self._cached_compiled_union_files if self._use_union\
-            else self._cached_compiled_files
+            self.build_compiled_caches(self._use_union)
+        return self._cached_compiled_union_files if self._use_union else self._cached_compiled_files

     @property
     def compiled_paths(self):
         """Returns patterns with only separators in them, for more precise filtering."""
         if self._dirty:
-            self.build_compiled_caches(True if self._use_union else False)
-            self._dirty = False
-        return self._cached_compiled_union_paths if self._use_union\
-            else self._cached_compiled_paths
+            self.build_compiled_caches(self._use_union)
+        return self._cached_compiled_union_paths if self._use_union else self._cached_compiled_paths

     # ---Public
     def add(self, regex, forced=False):
         """This interface should throw exceptions if there is an error during
         regex compilation"""
-        if self.isExcluded(regex):
+        if self.has_entry(regex):
             # This exception should never be ignored
             raise AlreadyThereException()
         if regex in forbidden_regexes:
-            raise Exception("Forbidden (dangerous) expression.")
+            raise ValueError("Forbidden (dangerous) expression.")

         iscompilable, exception, compiled = self.compile_re(regex)
         if not iscompilable and not forced:
@@ -256,12 +251,27 @@ class ExcludeList(Markable):
         """Returns the number of marked regexes only."""
         return len([x for marked, x in self if marked])

-    def isExcluded(self, regex):
+    def has_entry(self, regex):
         for item in self._excluded:
             if regex == item[0]:
                 return True
         return False

+    def is_excluded(self, dirname, filename):
+        """Return True if the file or the absolute path to file is supposed to be
+        filtered out, False otherwise."""
+        matched = False
+        for expr in self.compiled_files:
+            if expr.fullmatch(filename):
+                matched = True
+                break
+        if not matched:
+            for expr in self.compiled_paths:
+                if expr.fullmatch(dirname + sep + filename):
+                    matched = True
+                    break
+        return matched
+
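The new is_excluded() tries the cheap filename-only patterns first and only falls back to full-path patterns when nothing matched. A hypothetical usage sketch with plain data instead of an ExcludeList instance (patterns and paths are made up):

```python
# Hedged sketch of the two-stage check above.
import re
from os import sep

compiled_files = [re.compile(r"^\.DS_Store$")]  # no separator in pattern
compiled_paths = [re.compile(r".*" + re.escape(sep) + r"\.Trash\-.*")]

def is_excluded(dirname, filename):
    if any(p.fullmatch(filename) for p in compiled_files):
        return True  # matched on the filename alone
    full_path = dirname + sep + filename
    return any(p.fullmatch(full_path) for p in compiled_paths)

print(is_excluded("/home/me", ".DS_Store"))      # True (filename stage)
print(is_excluded("/home/me/.Trash-1000", "f"))  # True (path stage)
```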
     def remove(self, regex):
         for item in self._excluded:
             if item[0] == regex:
@@ -280,13 +290,14 @@ class ExcludeList(Markable):
                 was_marked = self.is_marked(regex)
                 is_compilable, exception, compiled = self.compile_re(newregex)
                 # We overwrite the found entry
-                self._excluded[self._excluded.index(item)] =\
-                    [newregex, is_compilable, exception, compiled]
+                self._excluded[self._excluded.index(item)] = [newregex, is_compilable, exception, compiled]
                 self._remove_compiled(regex)
                 break
         if not found:
             return
-        if is_compilable and was_marked:
+        if is_compilable:
+            self._add_compiled(newregex)
+            if was_marked:
                 # Not marked by default when added, add it back
                 self.mark(newregex)

@@ -300,7 +311,7 @@ class ExcludeList(Markable):
             if regex not in default_regexes:
                 self.unmark(regex)
         for default_regex in default_regexes:
-            if not self.isExcluded(default_regex):
+            if not self.has_entry(default_regex):
                 self.add(default_regex)
             self.mark(default_regex)

@@ -326,8 +337,10 @@ class ExcludeList(Markable):
                 # "forced" avoids compilation exceptions and adds anyway
                 self.add(regex_string, forced=True)
             except AlreadyThereException:
-                logging.error(f"Regex \"{regex_string}\" \
-loaded from XML was already present in the list.")
+                logging.error(
+                    f'Regex "{regex_string}" \
+loaded from XML was already present in the list.'
+                )
                 continue
             if exclude_item.get("marked") == "y":
                 marked.add(regex_string)
@@ -352,6 +365,7 @@ loaded from XML was already present in the list.")
 class ExcludeDict(ExcludeList):
     """Exclusion list holding a set of regular expressions as keys, the compiled
     Pattern, compilation error and compilable boolean as values."""

     # Implemntation around a dictionary instead of a list, which implies
     # to keep the index of each string-key as its sub-element and keep it updated
     # whenever insert/remove is done.
@@ -399,9 +413,9 @@ class ExcludeDict(ExcludeList):
         if self._use_union:
             return
         try:
-            self._excluded_compiled.add(self._excluded[regex]["compiled"])
+            self._excluded_compiled.add(self._excluded.get(regex).get("compiled"))
         except Exception as e:
-            logging.warning(f"Exception while adding regex {regex} to compiled set: {e}")
+            logging.error(f"Exception while adding regex {regex} to compiled set: {e}")
             return

     def is_compilable(self, regex):
@@ -418,14 +432,9 @@ class ExcludeDict(ExcludeList):
         # and other indices should be pushed by one
         for value in self._excluded.values():
             value["index"] += 1
-        self._excluded[regex] = {
-            "index": 0,
-            "compilable": iscompilable,
-            "error": exception,
-            "compiled": compiled
-        }
+        self._excluded[regex] = {"index": 0, "compilable": iscompilable, "error": exception, "compiled": compiled}

-    def isExcluded(self, regex):
+    def has_entry(self, regex):
         if regex in self._excluded.keys():
             return True
         return False
@@ -451,13 +460,15 @@ class ExcludeDict(ExcludeList):
         previous = self._excluded.pop(regex)
         iscompilable, error, compiled = self.compile_re(newregex)
         self._excluded[newregex] = {
-            "index": previous["index"],
+            "index": previous.get("index"),
             "compilable": iscompilable,
             "error": error,
-            "compiled": compiled
+            "compiled": compiled,
         }
         self._remove_compiled(regex)
-        if was_marked and iscompilable:
+        if iscompilable:
+            self._add_compiled(newregex)
+            if was_marked:
                 self.mark(newregex)

     def save_to_xml(self, outfile):
@@ -492,8 +503,12 @@ def ordered_keys(_dict):


 if ISWINDOWS:
-    def has_sep(x):
-        return '\\' + sep in x
+
+    def has_sep(regexp):
+        return "\\" + sep in regexp

 else:
-    def has_sep(x):
-        return sep in x
+
+    def has_sep(regexp):
+        return sep in regexp

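What has_sep() decides, illustrated with POSIX values assumed (on Windows the escaped form "\\" + sep is searched for, because a bare backslash inside a regex is an escape character):

```python
# Assumes POSIX where sep == "/". Patterns without a separator are treated as
# filename-only rules; patterns containing one are treated as path rules.
sep = "/"

def has_sep(regexp):
    return sep in regexp

print(has_sep(r"^\.DS_Store$"))    # False -> goes into the "files" cache
print(has_sep(r".*/\.Trash\-.*"))  # True  -> goes into the "paths" cache
```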
@@ -131,15 +131,11 @@ def export_to_xhtml(colnames, rows):
             indented = "indented"
         filename = row[1]
         cells = "".join(CELL_TEMPLATE.format(value=value) for value in row[2:])
-        rendered_rows.append(
-            ROW_TEMPLATE.format(indented=indented, filename=filename, cells=cells)
-        )
+        rendered_rows.append(ROW_TEMPLATE.format(indented=indented, filename=filename, cells=cells))
         previous_group_id = row[0]
     rendered_rows = "".join(rendered_rows)
     # The main template can't use format because the css code uses {}
-    content = MAIN_TEMPLATE.replace("$colheaders", colheaders).replace(
-        "$rows", rendered_rows
-    )
+    content = MAIN_TEMPLATE.replace("$colheaders", colheaders).replace("$rows", rendered_rows)
     folder = mkdtemp()
     destpath = op.join(folder, "export.htm")
     fp = open(destpath, "wt", encoding="utf-8")

79 core/fs.py
@@ -12,6 +12,7 @@
 # and I'm doing it now.

 import hashlib
+from math import floor
 import logging

 from hscommon.util import nonone, get_file_ext
@@ -30,6 +31,14 @@ __all__ = [

 NOT_SET = object()

+# The goal here is to not run out of memory on really big files. However, the chunk
+# size has to be large enough so that the python loop isn't too costly in terms of
+# CPU.
+CHUNK_SIZE = 1024 * 1024  # 1 MiB
+
+# Minimum size below which partial hashes don't need to be computed
+MIN_FILE_SIZE = 3 * CHUNK_SIZE  # 3MiB, because we take 3 samples


 class FSError(Exception):
     cls_message = "An error has occured on '{name}' in '{parent}'"
@@ -70,15 +79,9 @@ class OperationError(FSError):


 class File:
-    """Represents a file and holds metadata to be used for scanning.
-    """
+    """Represents a file and holds metadata to be used for scanning."""

-    INITIAL_INFO = {
-        "size": 0,
-        "mtime": 0,
-        "md5": "",
-        "md5partial": "",
-    }
+    INITIAL_INFO = {"size": 0, "mtime": 0, "md5": b"", "md5partial": b"", "md5samples": b""}
     # Slots for File make us save quite a bit of memory. In a memory test I've made with a lot of
     # files, I saved 35% memory usage with "unread" files (no _read_info() call) and gains become
     # even greater when we take into account read attributes (70%!). Yeah, it's worth it.
@@ -98,9 +101,7 @@ class File:
         try:
             self._read_info(attrname)
         except Exception as e:
-            logging.warning(
-                "An error '%s' was raised while decoding '%s'", e, repr(self.path)
-            )
+            logging.warning("An error '%s' was raised while decoding '%s'", e, repr(self.path))
         result = object.__getattribute__(self, attrname)
         if result is NOT_SET:
             result = self.INITIAL_INFO[attrname]
@@ -112,37 +113,61 @@ class File:
         return (0x4000, 0x4000)  # 16Kb

     def _read_info(self, field):
+        # print(f"_read_info({field}) for {self}")
         if field in ("size", "mtime"):
             stats = self.path.stat()
             self.size = nonone(stats.st_size, 0)
             self.mtime = nonone(stats.st_mtime, 0)
         elif field == "md5partial":
             try:
-                fp = self.path.open("rb")
-                offset, size = self._get_md5partial_offset_and_size()
-                fp.seek(offset)
-                partialdata = fp.read(size)
-                md5 = hashlib.md5(partialdata)
-                self.md5partial = md5.digest()
-                fp.close()
+                with self.path.open("rb") as fp:
+                    offset, size = self._get_md5partial_offset_and_size()
+                    fp.seek(offset)
+                    partialdata = fp.read(size)
+                    md5 = hashlib.md5(partialdata)
+                    self.md5partial = md5.digest()
             except Exception:
                 pass
         elif field == "md5":
             try:
-                fp = self.path.open("rb")
-                md5 = hashlib.md5()
-                # The goal here is to not run out of memory on really big files. However, the chunk
-                # size has to be large enough so that the python loop isn't too costly in terms of
-                # CPU.
-                CHUNK_SIZE = 1024 * 1024  # 1 mb
-                filedata = fp.read(CHUNK_SIZE)
-                while filedata:
-                    md5.update(filedata)
-                    filedata = fp.read(CHUNK_SIZE)
-                self.md5 = md5.digest()
-                fp.close()
+                with self.path.open("rb") as fp:
+                    md5 = hashlib.md5()
+                    filedata = fp.read(CHUNK_SIZE)
+                    while filedata:
+                        md5.update(filedata)
+                        filedata = fp.read(CHUNK_SIZE)
+                    # FIXME For python 3.8 and later
+                    # while filedata := fp.read(CHUNK_SIZE):
+                    #     md5.update(filedata)
+                    self.md5 = md5.digest()
             except Exception:
                 pass
+        elif field == "md5samples":
+            try:
+                with self.path.open("rb") as fp:
+                    size = self.size
+                    # Might as well hash such small files entirely.
+                    if size <= MIN_FILE_SIZE:
+                        setattr(self, field, self.md5)
+                        return
+
+                    # Chunk at 25% of the file
+                    fp.seek(floor(size * 25 / 100), 0)
+                    filedata = fp.read(CHUNK_SIZE)
+                    md5 = hashlib.md5(filedata)
+
+                    # Chunk at 60% of the file
+                    fp.seek(floor(size * 60 / 100), 0)
+                    filedata = fp.read(CHUNK_SIZE)
+                    md5.update(filedata)
+
+                    # Last chunk of the file
+                    fp.seek(-CHUNK_SIZE, 2)
+                    filedata = fp.read(CHUNK_SIZE)
+                    md5.update(filedata)
+                    setattr(self, field, md5.digest())
+            except Exception as e:
+                logging.error(f"Error computing md5samples: {e}")

     def _read_all_info(self, attrnames=None):
         """Cache all possible info.
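The sampling scheme added above is easy to verify outside the File class. A self-contained sketch using plain paths rather than dupeGuru's path wrapper:

```python
# Sketch of the three-sample hash: one CHUNK_SIZE read at 25% and 60% of the
# file plus the final chunk; files at or under MIN_FILE_SIZE fall back to a
# full hash, mirroring the guard above.
import hashlib
import os
from math import floor

CHUNK_SIZE = 1024 * 1024  # 1 MiB
MIN_FILE_SIZE = 3 * CHUNK_SIZE

def md5samples(path):
    size = os.path.getsize(path)
    with open(path, "rb") as fp:
        if size <= MIN_FILE_SIZE:
            return hashlib.md5(fp.read()).digest()  # small file: hash it all
        md5 = hashlib.md5()
        for offset in (floor(size * 25 / 100), floor(size * 60 / 100)):
            fp.seek(offset)
            md5.update(fp.read(CHUNK_SIZE))
        fp.seek(-CHUNK_SIZE, 2)  # last chunk, seeking from the end of the file
        md5.update(fp.read(CHUNK_SIZE))
        return md5.digest()
```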
@@ -157,8 +182,7 @@ class File:
     # --- Public
     @classmethod
     def can_handle(cls, path):
-        """Returns whether this file wrapper class can handle ``path``.
-        """
+        """Returns whether this file wrapper class can handle ``path``."""
         return not path.islink() and path.isfile()

     def rename(self, newname):
@@ -176,8 +200,7 @@ class File:
         self.path = destpath

     def get_display_info(self, group, delta):
-        """Returns a display-ready dict of dupe's data.
-        """
+        """Returns a display-ready dict of dupe's data."""
         raise NotImplementedError()

     # --- Properties
@@ -197,7 +220,7 @@ class File:
 class Folder(File):
     """A wrapper around a folder path.

-    It has the size/md5 info of a File, but it's value are the sum of its subitems.
+    It has the size/md5 info of a File, but its value is the sum of its subitems.
     """

     __slots__ = File.__slots__ + ("_subfolders",)
@@ -212,15 +235,17 @@ class Folder(File):
         return folders + files

     def _read_info(self, field):
+        # print(f"_read_info({field}) for Folder {self}")
         if field in {"size", "mtime"}:
             size = sum((f.size for f in self._all_items()), 0)
             self.size = size
             stats = self.path.stat()
             self.mtime = nonone(stats.st_mtime, 0)
-        elif field in {"md5", "md5partial"}:
+        elif field in {"md5", "md5partial", "md5samples"}:
             # What's sensitive here is that we must make sure that subfiles'
             # md5 are always added up in the same order, but we also want a
             # different md5 if a file gets moved in a different subdirectory.

             def get_dir_md5_concat():
                 items = self._all_items()
                 items.sort(key=lambda f: f.path)
@@ -234,9 +259,7 @@ class Folder(File):
     @property
     def subfolders(self):
         if self._subfolders is None:
-            subfolders = [
-                p for p in self.path.listdir() if not p.islink() and p.isdir()
-            ]
+            subfolders = [p for p in self.path.listdir() if not p.islink() and p.isdir()]
             self._subfolders = [self.__class__(p) for p in subfolders]
         return self._subfolders

@@ -15,16 +15,21 @@ class DupeGuruGUIObject(Listener):
         self.app = app

     def directories_changed(self):
+        # Implemented in child classes
         pass

     def dupes_selected(self):
+        # Implemented in child classes
         pass

     def marking_changed(self):
+        # Implemented in child classes
         pass

     def results_changed(self):
+        # Implemented in child classes
         pass

     def results_changed_but_keep_selection(self):
+        # Implemented in child classes
         pass

@@ -29,8 +28,7 @@ class DeletionOptionsView:
     """

     def update_msg(self, msg: str):
-        """Update the dialog's prompt with ``str``.
-        """
+        """Update the dialog's prompt with ``str``."""

     def show(self):
         """Show the dialog in a modal fashion.
@@ -39,8 +38,7 @@ class DeletionOptionsView:
     """

     def set_hardlink_option_enabled(self, is_enabled: bool):
-        """Enable or disable the widget controlling :attr:`DeletionOptions.use_hardlinks`.
-        """
+        """Enable or disable the widget controlling :attr:`DeletionOptions.use_hardlinks`."""


 class DeletionOptions(GUIObject):
@@ -75,8 +73,7 @@ class DeletionOptions(GUIObject):
         return self.view.show()

     def supports_links(self):
-        """Returns whether our platform supports symlinks.
-        """
+        """Returns whether our platform supports symlinks."""
         # When on a platform that doesn't implement it, calling os.symlink() (with the wrong number
         # of arguments) raises NotImplementedError, which allows us to gracefully check for the
         # feature.
@@ -32,9 +32,7 @@ class DetailsPanel(GUIObject, DupeGuruGUIObject):
         # we don't want the two sides of the table to display the stats for the same file
         ref = group.ref if group is not None and group.ref is not dupe else None
         data2 = self.app.get_display_info(ref, group, False)
-        columns = self.app.result_table.COLUMNS[
-            1:
-        ]  # first column is the 'marked' column
+        columns = self.app.result_table.COLUMNS[1:]  # first column is the 'marked' column
         self._table = [(c.display, data1[c.name], data2[c.name]) for c in columns]

     # --- Public
@@ -46,5 +44,4 @@ class DetailsPanel(GUIObject, DupeGuruGUIObject):

     # --- Event Handlers
     def dupes_selected(self):
-        self._refresh()
-        self.view.refresh()
+        self._view_updated()

@@ -11,7 +11,7 @@ from hscommon.gui.tree import Tree, Node
 from ..directories import DirectoryState
 from .base import DupeGuruGUIObject

-STATE_ORDER = [DirectoryState.Normal, DirectoryState.Reference, DirectoryState.Excluded]
+STATE_ORDER = [DirectoryState.NORMAL, DirectoryState.REFERENCE, DirectoryState.EXCLUDED]


 # Lazily loads children
@@ -36,9 +36,7 @@ class DirectoryNode(Node):
         self._loaded = True

     def update_all_states(self):
-        self._state = STATE_ORDER.index(
-            self._tree.app.directories.get_state(self._directory_path)
-        )
+        self._state = STATE_ORDER.index(self._tree.app.directories.get_state(self._directory_path))
         for node in self:
             node.update_all_states()

@@ -88,9 +86,9 @@ class DirectoryTree(Tree, DupeGuruGUIObject):
         else:
             # All selected nodes or on second-or-more level, exclude them.
             nodes = self.selected_nodes
-            newstate = DirectoryState.Excluded
-            if all(node.state == DirectoryState.Excluded for node in nodes):
-                newstate = DirectoryState.Normal
+            newstate = DirectoryState.EXCLUDED
+            if all(node.state == DirectoryState.EXCLUDED for node in nodes):
+                newstate = DirectoryState.NORMAL
         for node in nodes:
             node.state = newstate

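The toggle rule above, restated as one standalone function with integer stand-ins for the DirectoryState values:

```python
# Sketch: a selection flips to EXCLUDED unless every selected node is already
# EXCLUDED, in which case it flips back to NORMAL. Values are assumptions.
NORMAL, REFERENCE, EXCLUDED = range(3)

def toggle_state(selected_states):
    if selected_states and all(s == EXCLUDED for s in selected_states):
        return NORMAL
    return EXCLUDED

print(toggle_state([NORMAL, EXCLUDED]))    # EXCLUDED
print(toggle_state([EXCLUDED, EXCLUDED]))  # NORMAL
```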
@@ -105,5 +103,4 @@ class DirectoryTree(Tree, DupeGuruGUIObject):

     # --- Event Handlers
     def directories_changed(self):
-        self._refresh()
-        self.view.refresh()
+        self._view_updated()

@@ -5,8 +5,9 @@
 # which should be included with this package. The terms are also available at
 # http://www.gnu.org/licenses/gpl-3.0.html

-# from hscommon.trans import tr
 from .exclude_list_table import ExcludeListTable
+from core.exclude import has_sep
+from os import sep
 import logging

@@ -30,9 +31,10 @@ class ExcludeListDialogCore:
         self.refresh()

     def rename_selected(self, newregex):
-        """Renames the selected regex to ``newregex``.
-        If there's more than one selected row, the first one is used.
+        """Rename the selected regex to ``newregex``.
+        If there is more than one selected row, the first one is used.
         :param str newregex: The regex to rename the row's regex to.
+        :return bool: true if success, false if error.
         """
         try:
             r = self.exclude_list_table.selected_rows[0]
@@ -44,25 +46,42 @@ class ExcludeListDialogCore:
         return False

     def add(self, regex):
-        try:
-            self.exclude_list.add(regex)
-        except Exception as e:
-            raise(e)
+        self.exclude_list.add(regex)
         self.exclude_list.mark(regex)
         self.exclude_list_table.add(regex)

     def test_string(self, test_string):
-        """Sets property on row to highlight if its regex matches test_string supplied."""
+        """Set the highlight property on each row when its regex matches the
+        test_string supplied. Return True if any row matched."""
         matched = False
         for row in self.exclude_list_table.rows:
             compiled_regex = self.exclude_list.get_compiled(row.regex)
-            if compiled_regex and compiled_regex.match(test_string):
-                matched = True
+            if self.is_match(test_string, compiled_regex):
                 row.highlight = True
+                matched = True
             else:
                 row.highlight = False
         return matched

+    def is_match(self, test_string, compiled_regex):
+        # This method is like an inverted version of ExcludeList.is_excluded()
+        if not compiled_regex:
+            return False
+        matched = False
+
+        # Test only the filename portion of the path
+        if not has_sep(compiled_regex.pattern) and sep in test_string:
+            filename = test_string.rsplit(sep, 1)[1]
+            if compiled_regex.fullmatch(filename):
+                matched = True
+            return matched
+
+        # Test the entire path + filename
+        if compiled_regex.fullmatch(test_string):
+            matched = True
+        return matched
+
     def reset_rows_highlight(self):
         for row in self.exclude_list_table.rows:
             row.highlight = False
@@ -6,19 +6,17 @@ from .base import DupeGuruGUIObject
 from hscommon.gui.table import GUITable, Row
 from hscommon.gui.column import Column, Columns
 from hscommon.trans import trget

 tr = trget("ui")


 class ExcludeListTable(GUITable, DupeGuruGUIObject):
-    COLUMNS = [
-        Column("marked", ""),
-        Column("regex", tr("Regular Expressions"))
-    ]
+    COLUMNS = [Column("marked", ""), Column("regex", tr("Regular Expressions"))]

     def __init__(self, exclude_list_dialog, app):
         GUITable.__init__(self)
         DupeGuruGUIObject.__init__(self, app)
-        self.columns = Columns(self)
+        self._columns = Columns(self)
         self.dialog = exclude_list_dialog

     def rename_selected(self, newname):
@@ -36,7 +34,7 @@ class ExcludeListTable(GUITable, DupeGuruGUIObject):
         return ExcludeListRow(self, self.dialog.exclude_list.is_marked(regex), regex), 0

     def _do_delete(self):
-        self.dalog.exclude_list.remove(self.selected_row.regex)
+        self.dialog.exclude_list.remove(self.selected_row.regex)

     # --- Override
     def add(self, regex):
@@ -22,11 +22,9 @@ class IgnoreListDialog:
     def clear(self):
         if not self.ignore_list:
             return
-        msg = tr(
-            "Do you really want to remove all %d items from the ignore list?"
-        ) % len(self.ignore_list)
+        msg = tr("Do you really want to remove all %d items from the ignore list?") % len(self.ignore_list)
         if self.app.view.ask_yes_no(msg):
-            self.ignore_list.Clear()
+            self.ignore_list.clear()
             self.refresh()

     def refresh(self):
@@ -22,7 +22,7 @@ class IgnoreListTable(GUITable):

     def __init__(self, ignore_list_dialog):
         GUITable.__init__(self)
-        self.columns = Columns(self)
+        self._columns = Columns(self)
         self.view = None
         self.dialog = ignore_list_dialog

@@ -21,7 +21,7 @@ class ProblemTable(GUITable):

     def __init__(self, problem_dialog):
         GUITable.__init__(self)
-        self.columns = Columns(self)
+        self._columns = Columns(self)
         self.dialog = problem_dialog

     # --- Override
@@ -41,11 +41,11 @@ class DupeRow(Row):
         # table.DELTA_COLUMNS are always "delta"
         self._delta_columns = self.table.DELTA_COLUMNS.copy()
         dupe_info = self.data
+        if self._group.ref is None:
+            return False
         ref_info = self._group.ref.get_display_info(group=self._group, delta=False)
         for key, value in dupe_info.items():
-            if (key not in self._delta_columns) and (
-                ref_info[key].lower() != value.lower()
-            ):
+            if (key not in self._delta_columns) and (ref_info[key].lower() != value.lower()):
                 self._delta_columns.add(key)
         return column_name in self._delta_columns

@@ -82,7 +82,7 @@ class ResultTable(GUITable, DupeGuruGUIObject):
     def __init__(self, app):
         GUITable.__init__(self)
         DupeGuruGUIObject.__init__(self, app)
-        self.columns = Columns(self, prefaccess=app, savename="ResultTable")
+        self._columns = Columns(self, prefaccess=app, savename="ResultTable")
         self._power_marker = False
         self._delta_values = False
         self._sort_descriptors = ("name", True)
@@ -190,4 +190,4 @@ class ResultTable(GUITable, DupeGuruGUIObject):
         self.view.refresh()

     def save_session(self):
-        self.columns.save_columns()
+        self._columns.save_columns()

@@ -20,8 +20,7 @@ class IgnoreList:

     # ---Override
     def __init__(self):
-        self._ignored = {}
-        self._count = 0
+        self.clear()

     def __iter__(self):
         for first, seconds in self._ignored.items():
@@ -32,7 +31,7 @@ class IgnoreList:
         return self._count

     # ---Public
-    def AreIgnored(self, first, second):
+    def are_ignored(self, first, second):
         def do_check(first, second):
             try:
                 matches = self._ignored[first]
@@ -42,23 +41,23 @@ class IgnoreList:

         return do_check(first, second) or do_check(second, first)

-    def Clear(self):
+    def clear(self):
         self._ignored = {}
         self._count = 0

-    def Filter(self, func):
+    def filter(self, func):
         """Applies a filter on all ignored items, and remove all matches where func(first,second)
         doesn't return True.
         """
         filtered = IgnoreList()
         for first, second in self:
             if func(first, second):
-                filtered.Ignore(first, second)
+                filtered.ignore(first, second)
         self._ignored = filtered._ignored
         self._count = filtered._count

-    def Ignore(self, first, second):
-        if self.AreIgnored(first, second):
+    def ignore(self, first, second):
+        if self.are_ignored(first, second):
             return
         try:
             matches = self._ignored[first]
@@ -88,8 +87,7 @@ class IgnoreList:
         except KeyError:
             return False

-        if not inner(first, second):
-            if not inner(second, first):
+        if not inner(first, second) and not inner(second, first):
             raise ValueError()

     def load_from_xml(self, infile):
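A hypothetical round-trip with the renamed snake_case API (ignore/are_ignored/filter/clear replacing the old CamelCase names); the paths are made up:

```python
# Hedged usage sketch of IgnoreList after the renames above.
il = IgnoreList()
il.ignore("/photos/a.jpg", "/backup/a.jpg")
assert il.are_ignored("/backup/a.jpg", "/photos/a.jpg")  # order-insensitive
il.filter(lambda first, second: "backup" not in second)  # drops the pair above
il.clear()  # __init__ now simply delegates to this
```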
@@ -110,7 +108,7 @@ class IgnoreList:
         for sfn in subfile_elems:
             subfile_path = sfn.get("path")
             if subfile_path:
-                self.Ignore(file_path, subfile_path)
+                self.ignore(file_path, subfile_path)

     def save_to_xml(self, outfile):
         """Create a XML file that can be used by load_from_xml.

@@ -17,9 +17,11 @@ class Markable:
     # in self.__marked, and is not affected by __inverted. Thus, self.mark while __inverted
     # is True will launch _DidUnmark.
     def _did_mark(self, o):
+        # Implemented in child classes
         pass

     def _did_unmark(self, o):
+        # Implemented in child classes
         pass

     def _get_markable_count(self):

@@ -6,7 +6,7 @@
|
|||||||
# which should be included with this package. The terms are also available at
|
# which should be included with this package. The terms are also available at
|
||||||
# http://www.gnu.org/licenses/gpl-3.0.html
|
# http://www.gnu.org/licenses/gpl-3.0.html
|
||||||
|
|
||||||
from hsaudiotag import auto
|
import mutagen
|
||||||
from hscommon.util import get_file_ext, format_size, format_time
|
from hscommon.util import get_file_ext, format_size, format_time
|
||||||
|
|
||||||
from core.util import format_timestamp, format_perc, format_words, format_dupe_count
|
from core.util import format_timestamp, format_perc, format_words, format_dupe_count
|
||||||
@@ -26,6 +26,9 @@ TAG_FIELDS = {
|
|||||||
"comment",
|
"comment",
|
||||||
}
|
}
|
||||||
|
|
||||||
|
# This is a temporary workaround for migration from hsaudiotag for the can_handle method
|
||||||
|
SUPPORTED_EXTS = {"mp3", "wma", "m4a", "m4p", "ogg", "flac", "aif", "aiff", "aifc"}
|
||||||
|
|
||||||
|
|
||||||
class MusicFile(fs.File):
|
class MusicFile(fs.File):
|
||||||
INITIAL_INFO = fs.File.INITIAL_INFO.copy()
|
INITIAL_INFO = fs.File.INITIAL_INFO.copy()
|
||||||
@@ -50,7 +53,7 @@ class MusicFile(fs.File):
|
|||||||
def can_handle(cls, path):
|
def can_handle(cls, path):
|
||||||
if not fs.File.can_handle(path):
|
if not fs.File.can_handle(path):
|
||||||
return False
|
return False
|
||||||
return get_file_ext(path.name) in auto.EXT2CLASS
|
return get_file_ext(path.name) in SUPPORTED_EXTS
|
||||||
|
|
||||||
def get_display_info(self, group, delta):
|
def get_display_info(self, group, delta):
|
||||||
size = self.size
|
size = self.size
|
||||||
@@ -95,21 +98,23 @@ class MusicFile(fs.File):
|
|||||||
}
|
}
|
||||||
|
|
||||||
def _get_md5partial_offset_and_size(self):
|
def _get_md5partial_offset_and_size(self):
|
||||||
f = auto.File(str(self.path))
|
# No longer calculating the offset and audio size, just whole file
|
||||||
return (f.audio_offset, f.audio_size)
|
size = self.path.stat().st_size
|
||||||
|
return (0, size)
|
||||||
|
|
||||||
def _read_info(self, field):
|
def _read_info(self, field):
|
||||||
fs.File._read_info(self, field)
|
fs.File._read_info(self, field)
|
||||||
if field in TAG_FIELDS:
|
if field in TAG_FIELDS:
|
||||||
f = auto.File(str(self.path))
|
# The various conversions here are to make this look like the previous implementation
|
||||||
self.audiosize = f.audio_size
|
file = mutagen.File(str(self.path), easy=True)
|
||||||
self.bitrate = f.bitrate
|
self.audiosize = self.path.stat().st_size
|
||||||
self.duration = f.duration
|
self.bitrate = file.info.bitrate / 1000
|
||||||
self.samplerate = f.sample_rate
|
self.duration = file.info.length
|
||||||
self.artist = f.artist
|
self.samplerate = file.info.sample_rate
|
||||||
self.album = f.album
|
self.artist = ", ".join(file.tags.get("artist") or [])
|
||||||
self.title = f.title
|
self.album = ", ".join(file.tags.get("album") or [])
|
||||||
self.genre = f.genre
|
self.title = ", ".join(file.tags.get("title") or [])
|
||||||
self.comment = f.comment
|
self.genre = ", ".join(file.tags.get("genre") or [])
|
||||||
self.year = f.year
|
self.comment = ", ".join(file.tags.get("comment") or [""])
|
||||||
self.track = f.track
|
self.year = ", ".join(file.tags.get("date") or [])
|
||||||
|
self.track = (file.tags.get("tracknumber") or [""])[0]
|
||||||
|
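Note: this is the hsaudiotag-to-mutagen migration. A minimal sketch of the mutagen "easy" interface the new _read_info relies on (the file path is a placeholder; mutagen must be installed):

```python
# Sketch of the mutagen easy API used above; "song.mp3" is an assumed path.
import mutagen

f = mutagen.File("song.mp3", easy=True)  # easy=True normalizes tag keys
print(f.info.length)       # duration in seconds (float)
print(f.info.bitrate)      # bits per second; the diff divides by 1000 for kbps
print(f.info.sample_rate)
# Easy tags map keys like "artist"/"album" to lists of strings, which is why
# the new code joins with ", " and falls back to an empty list when absent.
print(", ".join(f.tags.get("artist") or []))
```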
@@ -17,9 +17,9 @@ class ScannerME(ScannerBase):
     @staticmethod
     def get_scan_options():
         return [
-            ScanOption(ScanType.Filename, tr("Filename")),
-            ScanOption(ScanType.Fields, tr("Filename - Fields")),
-            ScanOption(ScanType.FieldsNoOrder, tr("Filename - Fields (No Order)")),
-            ScanOption(ScanType.Tag, tr("Tags")),
-            ScanOption(ScanType.Contents, tr("Contents")),
+            ScanOption(ScanType.FILENAME, tr("Filename")),
+            ScanOption(ScanType.FIELDS, tr("Filename - Fields")),
+            ScanOption(ScanType.FIELDSNOORDER, tr("Filename - Fields (No Order)")),
+            ScanOption(ScanType.TAG, tr("Tags")),
+            ScanOption(ScanType.CONTENTS, tr("Contents")),
         ]
@@ -33,8 +33,7 @@ CacheRow = namedtuple("CacheRow", "id path blocks mtime")


 class ShelveCache:
-    """A class to cache picture blocks in a shelve backend.
-    """
+    """A class to cache picture blocks in a shelve backend."""

     def __init__(self, db=None, readonly=False):
         self.istmp = db is None
@@ -81,9 +80,7 @@ class ShelveCache:
         self.shelve[wrap_id(rowid)] = wrap_path(path_str)

     def _compute_maxid(self):
-        return max(
-            (unwrap_id(k) for k in self.shelve if k.startswith("id:")), default=1
-        )
+        return max((unwrap_id(k) for k in self.shelve if k.startswith("id:")), default=1)

     def _get_new_id(self):
         self.maxid += 1
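Note: the collapsed _compute_maxid works because max() accepts default= (Python 3.4+), so an empty shelve yields 1 instead of raising ValueError. A standalone illustration of the same pattern:

```python
# Hypothetical keys mimicking the shelve's "id:<n>"/"path:<...>" scheme.
keys = ["id:3", "path:whatever", "id:7"]
maxid = max((int(k.split(":")[1]) for k in keys if k.startswith("id:")), default=1)
print(maxid)  # 7; with no "id:" keys present it would be 1
```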
@@ -13,8 +13,7 @@ from .cache import string_to_colors, colors_to_string


 class SqliteCache:
-    """A class to cache picture blocks in a sqlite backend.
-    """
+    """A class to cache picture blocks in a sqlite backend."""

     def __init__(self, db=":memory:", readonly=False):
         # readonly is not used in the sqlite version of the cache
@@ -71,18 +70,14 @@ class SqliteCache:
         except sqlite.OperationalError:
             logging.warning("Picture cache could not set value for key %r", path_str)
         except sqlite.DatabaseError as e:
-            logging.warning(
-                "DatabaseError while setting value for key %r: %s", path_str, str(e)
-            )
+            logging.warning("DatabaseError while setting value for key %r: %s", path_str, str(e))

     def _create_con(self, second_try=False):
         def create_tables():
             logging.debug("Creating picture cache tables.")
             self.con.execute("drop table if exists pictures")
             self.con.execute("drop index if exists idx_path")
-            self.con.execute(
-                "create table pictures(path TEXT, mtime INTEGER, blocks TEXT)"
-            )
+            self.con.execute("create table pictures(path TEXT, mtime INTEGER, blocks TEXT)")
             self.con.execute("create index idx_path on pictures (path)")

         self.con = sqlite.connect(self.dbname, isolation_level=None)
@@ -93,9 +88,7 @@ class SqliteCache:
         except sqlite.DatabaseError as e:  # corrupted db
             if second_try:
                 raise  # Something really strange is happening
-            logging.warning(
-                "Could not create picture cache because of an error: %s", str(e)
-            )
+            logging.warning("Could not create picture cache because of an error: %s", str(e))
             self.con.close()
             os.remove(self.dbname)
             self._create_con(second_try=True)
@@ -125,9 +118,7 @@ class SqliteCache:
         raise ValueError(path)

     def get_multiple(self, rowids):
-        sql = "select rowid, blocks from pictures where rowid in (%s)" % ",".join(
-            map(str, rowids)
-        )
+        sql = "select rowid, blocks from pictures where rowid in (%s)" % ",".join(map(str, rowids))
         cur = self.con.execute(sql)
         return ((rowid, string_to_colors(blocks)) for rowid, blocks in cur)

@@ -148,7 +139,5 @@ class SqliteCache:
                 continue
             todelete.append(rowid)
         if todelete:
-            sql = "delete from pictures where rowid in (%s)" % ",".join(
-                map(str, todelete)
-            )
+            sql = "delete from pictures where rowid in (%s)" % ",".join(map(str, todelete))
             self.con.execute(sql)
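Note: the joined-up SQL strings interpolate integer rowids, not user input, so the % formatting stays safe here. A standalone demo of the same pattern with the stdlib sqlite3 module:

```python
# Self-contained demo; table layout copied from the create_tables() above.
import sqlite3

con = sqlite3.connect(":memory:", isolation_level=None)
con.execute("create table pictures(path TEXT, mtime INTEGER, blocks TEXT)")
con.execute("create index idx_path on pictures (path)")
con.execute("insert into pictures values ('a.jpg', 0, 'xyz')")
rowids = [1]  # ints only, never strings from the outside
sql = "select rowid, blocks from pictures where rowid in (%s)" % ",".join(map(str, rowids))
print(list(con.execute(sql)))  # [(1, 'xyz')]
```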
@@ -193,8 +193,8 @@ class TIFF_file:
         self.s2nfunc = s2n_intel if self.endian == INTEL_ENDIAN else s2n_motorola

     def s2n(self, offset, length, signed=0, debug=False):
-        slice = self.data[offset : offset + length]
-        val = self.s2nfunc(slice)
+        data_slice = self.data[offset : offset + length]
+        val = self.s2nfunc(data_slice)
         # Sign extension ?
         if signed:
             msb = 1 << (8 * length - 1)
@@ -206,7 +206,7 @@ class TIFF_file:
                 "Slice for offset %d length %d: %r and value: %d",
                 offset,
                 length,
-                slice,
+                data_slice,
                 val,
             )
         return val
@@ -236,10 +236,10 @@ class TIFF_file:
         for i in range(entries):
             entry = ifd + 2 + 12 * i
             tag = self.s2n(entry, 2)
-            type = self.s2n(entry + 2, 2)
-            if not 1 <= type <= 10:
+            entry_type = self.s2n(entry + 2, 2)
+            if not 1 <= entry_type <= 10:
                 continue  # not handled
-            typelen = [1, 1, 2, 4, 8, 1, 1, 2, 4, 8][type - 1]
+            typelen = [1, 1, 2, 4, 8, 1, 1, 2, 4, 8][entry_type - 1]
             count = self.s2n(entry + 4, 4)
             if count > MAX_COUNT:
                 logging.debug("Probably corrupt. Aborting.")
@@ -247,25 +247,23 @@ class TIFF_file:
             offset = entry + 8
             if count * typelen > 4:
                 offset = self.s2n(offset, 4)
-            if type == 2:
+            if entry_type == 2:
                 # Special case: nul-terminated ASCII string
                 values = str(self.data[offset : offset + count - 1], encoding="latin-1")
             else:
                 values = []
-                signed = type == 6 or type >= 8
-                for j in range(count):
-                    if type in {5, 10}:
+                signed = entry_type == 6 or entry_type >= 8
+                for _ in range(count):
+                    if entry_type in {5, 10}:
                         # The type is either 5 or 10
-                        value_j = Fraction(
-                            self.s2n(offset, 4, signed), self.s2n(offset + 4, 4, signed)
-                        )
+                        value_j = Fraction(self.s2n(offset, 4, signed), self.s2n(offset + 4, 4, signed))
                     else:
                         # Not a fraction
                         value_j = self.s2n(offset, typelen, signed)
                     values.append(value_j)
                     offset = offset + typelen
             # Now "values" is either a string or an array
-            a.append((tag, type, values))
+            a.append((tag, entry_type, values))
         return a


@@ -296,13 +294,11 @@ def get_fields(fp):
     logging.debug("Exif header length: %d bytes", length)
     data = fp.read(length - 8)
     data_format = data[0]
-    logging.debug(
-        "%s format", {INTEL_ENDIAN: "Intel", MOTOROLA_ENDIAN: "Motorola"}[data_format]
-    )
+    logging.debug("%s format", {INTEL_ENDIAN: "Intel", MOTOROLA_ENDIAN: "Motorola"}[data_format])
     T = TIFF_file(data)
     # There may be more than one IFD per file, but we only read the first one because others are
     # most likely thumbnails.
-    main_IFD_offset = T.first_IFD()
+    main_ifd_offset = T.first_IFD()
     result = {}

     def add_tag_to_result(tag, values):
@@ -314,8 +310,8 @@ def get_fields(fp):
             return  # don't overwrite data
         result[stag] = values

-    logging.debug("IFD at offset %d", main_IFD_offset)
-    IFD = T.dump_IFD(main_IFD_offset)
+    logging.debug("IFD at offset %d", main_ifd_offset)
+    IFD = T.dump_IFD(main_ifd_offset)
     exif_off = gps_off = 0
     for tag, type, values in IFD:
         if tag == 0x8769:
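Note: besides the builtin-shadowing renames (slice, type), s2n's sign-extension branch is easiest to see with concrete numbers. For a 2-byte value, msb is 0x8000; a raw value with that bit set is re-interpreted as negative. A worked sketch of the same arithmetic (not the module's exact code):

```python
# Worked example of two's-complement sign extension as performed in s2n.
length = 2
val = 0xFFFE                   # raw unsigned read from the TIFF data
msb = 1 << (8 * length - 1)    # 0x8000 for a 2-byte field
if val & msb:                  # top bit set -> negative number
    val -= msb << 1            # subtract 0x10000
print(val)  # -2
```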
@@ -95,9 +95,7 @@ def prepare_pictures(pictures, cache_path, with_dimensions, j=job.nulljob):
                 picture.unicode_path,
                 picture.size,
             )
-            if (
-                picture.size < 10 * 1024 * 1024
-            ):  # We're really running out of memory
+            if picture.size < 10 * 1024 * 1024:  # We're really running out of memory
                 raise
         except MemoryError:
             logging.warning("Ran out of memory while preparing pictures")
@@ -106,9 +104,7 @@ def prepare_pictures(pictures, cache_path, with_dimensions, j=job.nulljob):


 def get_chunks(pictures):
-    min_chunk_count = (
-        multiprocessing.cpu_count() * 2
-    )  # have enough chunks to feed all subprocesses
+    min_chunk_count = multiprocessing.cpu_count() * 2  # have enough chunks to feed all subprocesses
     chunk_count = len(pictures) // DEFAULT_CHUNK_SIZE
     chunk_count = max(min_chunk_count, chunk_count)
     chunk_size = (len(pictures) // chunk_count) + 1
@@ -185,9 +181,7 @@ def getmatches(pictures, cache_path, threshold, match_scaled=False, j=job.nulljob):
         j.set_progress(comparison_count, progress_msg)

     j = j.start_subjob([3, 7])
-    pictures = prepare_pictures(
-        pictures, cache_path, with_dimensions=not match_scaled, j=j
-    )
+    pictures = prepare_pictures(pictures, cache_path, with_dimensions=not match_scaled, j=j)
     j = j.start_subjob([9, 1], tr("Preparing for matching"))
     cache = get_cache(cache_path)
     id2picture = {}
@@ -231,12 +225,8 @@ def getmatches(pictures, cache_path, threshold, match_scaled=False, j=job.nulljob):
             chunks,
             pictures,
         )  # some wiggle room for the next statements
-        logging.warning(
-            "Ran out of memory when scanning! We had %d matches.", len(matches)
-        )
-        del matches[
-            -len(matches) // 3 :
-        ]  # some wiggle room to ensure we don't run out of memory again.
+        logging.warning("Ran out of memory when scanning! We had %d matches.", len(matches))
+        del matches[-len(matches) // 3 :]  # some wiggle room to ensure we don't run out of memory again.
     pool.close()
     result = []
     myiter = j.iter_with_progress(
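Note: the get_chunks arithmetic guarantees at least two chunks per CPU so every worker stays fed. Traced with concrete numbers (a DEFAULT_CHUNK_SIZE of 256 is an assumption for illustration):

```python
# Standalone trace of the chunk-size arithmetic shown in the hunk above.
import multiprocessing

DEFAULT_CHUNK_SIZE = 256  # assumed value for this illustration
pictures = list(range(10_000))  # stand-ins for picture objects

min_chunk_count = multiprocessing.cpu_count() * 2  # keep all subprocesses busy
chunk_count = max(min_chunk_count, len(pictures) // DEFAULT_CHUNK_SIZE)
chunk_size = (len(pictures) // chunk_count) + 1
chunks = [pictures[i : i + chunk_size] for i in range(0, len(pictures), chunk_size)]
print(chunk_count, chunk_size, len(chunks))
```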
@@ -2,9 +2,9 @@
  * Created On: 2010-01-30
  * Copyright 2014 Hardcoded Software (http://www.hardcoded.net)
  *
- * This software is licensed under the "BSD" License as described in the "LICENSE" file,
- * which should be included with this package. The terms are also available at
- * http://www.hardcoded.net/licenses/bsd_license
+ * This software is licensed under the "BSD" License as described in the
+ * "LICENSE" file, which should be included with this package. The terms are
+ * also available at http://www.hardcoded.net/licenses/bsd_license
  */

 #include "common.h"
@@ -17,8 +17,7 @@ static PyObject *DifferentBlockCountError;
 /* Returns a 3 sized tuple containing the mean color of 'image'.
  * image: a PIL image or crop.
  */
-static PyObject* getblock(PyObject *image)
-{
+static PyObject *getblock(PyObject *image) {
     int i, totr, totg, totb;
     Py_ssize_t pixel_count;
     PyObject *ppixels;
@@ -30,7 +29,7 @@ static PyObject* getblock(PyObject *image)
     }

     pixel_count = PySequence_Length(ppixels);
-    for (i=0; i<pixel_count; i++) {
+    for (i = 0; i < pixel_count; i++) {
         PyObject *ppixel, *pr, *pg, *pb;
         int r, g, b;

@@ -65,8 +64,7 @@ static PyObject* getblock(PyObject *image)
 /* Returns the difference between the first block and the second.
  * It returns an absolute sum of the 3 differences (RGB).
  */
-static int diff(PyObject *first, PyObject *second)
-{
+static int diff(PyObject *first, PyObject *second) {
     int r1, g1, b1, r2, b2, g2;
     PyObject *pr, *pg, *pb;
     pr = PySequence_ITEM(first, 0);
@@ -93,7 +91,7 @@ static int diff(PyObject *first, PyObject *second)
 }

 PyDoc_STRVAR(block_getblocks2_doc,
 "Returns a list of blocks (3 sized tuples).\n\
 \n\
 image: A PIL image to base the blocks on.\n\
 block_count_per_side: This integer determine the number of blocks the function will return.\n\
@@ -101,8 +99,7 @@ If it is 10, for example, 100 blocks will be returns (10 width, 10 height). The
 necessarely cover square areas. The area covered by each block will be proportional to the image\n\
 itself.\n");

-static PyObject* block_getblocks2(PyObject *self, PyObject *args)
-{
+static PyObject *block_getblocks2(PyObject *self, PyObject *args) {
     int block_count_per_side, width, height, block_width, block_height, ih;
     PyObject *image;
     PyObject *pimage_size, *pwidth, *pheight;
@@ -128,23 +125,23 @@ static PyObject* block_getblocks2(PyObject *self, PyObject *args)
     block_width = max(width / block_count_per_side, 1);
     block_height = max(height / block_count_per_side, 1);

-    result = PyList_New(block_count_per_side * block_count_per_side);
+    result = PyList_New((Py_ssize_t)block_count_per_side * block_count_per_side);
     if (result == NULL) {
         return NULL;
     }

-    for (ih=0; ih<block_count_per_side; ih++) {
+    for (ih = 0; ih < block_count_per_side; ih++) {
         int top, bottom, iw;
-        top = min(ih*block_height, height-block_height);
+        top = min(ih * block_height, height - block_height);
         bottom = top + block_height;
-        for (iw=0; iw<block_count_per_side; iw++) {
+        for (iw = 0; iw < block_count_per_side; iw++) {
             int left, right;
             PyObject *pbox;
             PyObject *pmethodname;
             PyObject *pcrop;
             PyObject *pblock;

-            left = min(iw*block_width, width-block_width);
+            left = min(iw * block_width, width - block_width);
             right = left + block_width;
             pbox = inttuple(4, left, top, right, bottom);
             pmethodname = PyUnicode_FromString("crop");
@@ -161,7 +158,7 @@ static PyObject* block_getblocks2(PyObject *self, PyObject *args)
                 Py_DECREF(result);
                 return NULL;
             }
-            PyList_SET_ITEM(result, ih*block_count_per_side+iw, pblock);
+            PyList_SET_ITEM(result, ih * block_count_per_side + iw, pblock);
         }
     }

@@ -169,19 +166,19 @@ static PyObject* block_getblocks2(PyObject *self, PyObject *args)
 }

 PyDoc_STRVAR(block_avgdiff_doc,
 "Returns the average diff between first blocks and seconds.\n\
 \n\
 If the result surpasses limit, limit + 1 is returned, except if less than min_iterations\n\
 iterations have been made in the blocks.\n");

-static PyObject* block_avgdiff(PyObject *self, PyObject *args)
-{
+static PyObject *block_avgdiff(PyObject *self, PyObject *args) {
     PyObject *first, *second;
     int limit, min_iterations;
     Py_ssize_t count;
     int sum, i, result;

-    if (!PyArg_ParseTuple(args, "OOii", &first, &second, &limit, &min_iterations)) {
+    if (!PyArg_ParseTuple(args, "OOii", &first, &second, &limit,
+                          &min_iterations)) {
         return NULL;
     }

@@ -196,7 +193,7 @@ static PyObject* block_avgdiff(PyObject *self, PyObject *args)
     }

     sum = 0;
-    for (i=0; i<count; i++) {
+    for (i = 0; i < count; i++) {
         int iteration_count;
         PyObject *item1, *item2;

@@ -206,7 +203,8 @@ static PyObject* block_avgdiff(PyObject *self, PyObject *args)
         sum += diff(item1, item2);
         Py_DECREF(item1);
         Py_DECREF(item2);
-        if ((sum > limit*iteration_count) && (iteration_count >= min_iterations)) {
+        if ((sum > limit * iteration_count) &&
+            (iteration_count >= min_iterations)) {
             return PyLong_FromLong(limit + 1);
         }
     }
@@ -224,8 +222,7 @@ static PyMethodDef BlockMethods[] = {
     {NULL, NULL, 0, NULL} /* Sentinel */
 };

-static struct PyModuleDef BlockDef = {
-    PyModuleDef_HEAD_INIT,
+static struct PyModuleDef BlockDef = {PyModuleDef_HEAD_INIT,
     "_block",
     NULL,
     -1,
@@ -233,12 +230,9 @@ static struct PyModuleDef BlockDef = {
     NULL,
     NULL,
     NULL,
-    NULL
-};
+    NULL};

-PyObject *
-PyInit__block(void)
-{
+PyObject *PyInit__block(void) {
     PyObject *m = PyModule_Create(&BlockDef);
     if (m == NULL) {
         return NULL;
@@ -246,7 +240,8 @@ PyInit__block(void)

     NoBlocksError = PyErr_NewException("_block.NoBlocksError", NULL, NULL);
     PyModule_AddObject(m, "NoBlocksError", NoBlocksError);
-    DifferentBlockCountError = PyErr_NewException("_block.DifferentBlockCountError", NULL, NULL);
+    DifferentBlockCountError =
+        PyErr_NewException("_block.DifferentBlockCountError", NULL, NULL);
     PyModule_AddObject(m, "DifferentBlockCountError", DifferentBlockCountError);

     return m;
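Note: for orientation, a rough sketch of how the two entry points of this extension are driven from Python. The import path and image file names are assumptions, the extension must first be compiled (make modules), and Pillow must be installed:

```python
# Sketch only: module path and file names are assumptions, not taken from this diff.
from PIL import Image
from core.pe import _block  # assumed import path for the compiled extension

img1 = Image.open("a.jpg").convert("RGB")  # placeholder files
img2 = Image.open("b.jpg").convert("RGB")
blocks1 = _block.getblocks2(img1, 15)  # 15x15 grid of (r, g, b) mean-color tuples
blocks2 = _block.getblocks2(img2, 15)
# avgdiff returns limit + 1 as soon as the running average exceeds limit
# (after min_iterations blocks), which is the cheap early-rejection path.
print(_block.avgdiff(blocks1, blocks2, 768, 3))
```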
@@ -18,12 +18,12 @@ class ScannerPE(Scanner):
     @staticmethod
     def get_scan_options():
         return [
-            ScanOption(ScanType.FuzzyBlock, tr("Contents")),
-            ScanOption(ScanType.ExifTimestamp, tr("EXIF Timestamp")),
+            ScanOption(ScanType.FUZZYBLOCK, tr("Contents")),
+            ScanOption(ScanType.EXIFTIMESTAMP, tr("EXIF Timestamp")),
         ]

     def _getmatches(self, files, j):
-        if self.scan_type == ScanType.FuzzyBlock:
+        if self.scan_type == ScanType.FUZZYBLOCK:
             return matchblock.getmatches(
                 files,
                 cache_path=self.cache_path,
@@ -31,7 +31,7 @@ class ScannerPE(Scanner):
                 match_scaled=self.match_scaled,
                 j=j,
             )
-        elif self.scan_type == ScanType.ExifTimestamp:
+        elif self.scan_type == ScanType.EXIFTIMESTAMP:
             return matchexif.getmatches(files, self.match_scaled, j)
         else:
-            raise Exception("Invalid scan type")
+            raise ValueError("Invalid scan type")
@@ -52,6 +52,7 @@ class Results(Markable):
         self.app = app
         self.problems = []  # (dupe, error_msg)
         self.is_modified = False
+        self.refresh_required = False

     def _did_mark(self, dupe):
         self.__marked_size += dupe.size
@@ -94,8 +95,9 @@ class Results(Markable):

     # ---Private
     def __get_dupe_list(self):
-        if self.__dupes is None:
+        if self.__dupes is None or self.refresh_required:
             self.__dupes = flatten(group.dupes for group in self.groups)
+            self.refresh_required = False
         if None in self.__dupes:
             # This is debug logging to try to figure out #44
             logging.warning(
@@ -104,9 +106,7 @@ class Results(Markable):
                 self.groups,
             )
         if self.__filtered_dupes:
-            self.__dupes = [
-                dupe for dupe in self.__dupes if dupe in self.__filtered_dupes
-            ]
+            self.__dupes = [dupe for dupe in self.__dupes if dupe in self.__filtered_dupes]
         sd = self.__dupes_sort_descriptor
         if sd:
             self.sort_dupes(sd[0], sd[1], sd[2])
@@ -125,18 +125,10 @@ class Results(Markable):
             total_count = self.__total_count
             total_size = self.__total_size
         else:
-            mark_count = len(
-                [dupe for dupe in self.__filtered_dupes if self.is_marked(dupe)]
-            )
-            marked_size = sum(
-                dupe.size for dupe in self.__filtered_dupes if self.is_marked(dupe)
-            )
-            total_count = len(
-                [dupe for dupe in self.__filtered_dupes if self.is_markable(dupe)]
-            )
-            total_size = sum(
-                dupe.size for dupe in self.__filtered_dupes if self.is_markable(dupe)
-            )
+            mark_count = len([dupe for dupe in self.__filtered_dupes if self.is_marked(dupe)])
+            marked_size = sum(dupe.size for dupe in self.__filtered_dupes if self.is_marked(dupe))
+            total_count = len([dupe for dupe in self.__filtered_dupes if self.is_markable(dupe)])
+            total_size = sum(dupe.size for dupe in self.__filtered_dupes if self.is_markable(dupe))
         if self.mark_inverted:
             marked_size = self.__total_size - marked_size
         result = tr("%d / %d (%s / %s) duplicates marked.") % (
@@ -199,11 +191,7 @@ class Results(Markable):
         self.__filters.append(filter_str)
         if self.__filtered_dupes is None:
             self.__filtered_dupes = flatten(g[:] for g in self.groups)
-        self.__filtered_dupes = set(
-            dupe
-            for dupe in self.__filtered_dupes
-            if filter_re.search(str(dupe.path))
-        )
+        self.__filtered_dupes = set(dupe for dupe in self.__filtered_dupes if filter_re.search(str(dupe.path)))
         filtered_groups = set()
         for dupe in self.__filtered_dupes:
             filtered_groups.add(self.get_group_of_duplicate(dupe))
@@ -215,8 +203,7 @@ class Results(Markable):
         self.__dupes = None

     def get_group_of_duplicate(self, dupe):
-        """Returns :class:`~core.engine.Group` in which ``dupe`` belongs.
-        """
+        """Returns :class:`~core.engine.Group` in which ``dupe`` belongs."""
         try:
             return self.__group_of_duplicate[dupe]
         except (TypeError, KeyError):
@@ -282,8 +269,7 @@ class Results(Markable):
         self.is_modified = False

     def make_ref(self, dupe):
-        """Make ``dupe`` take the :attr:`~core.engine.Group.ref` position of its group.
-        """
+        """Make ``dupe`` take the :attr:`~core.engine.Group.ref` position of its group."""
         g = self.get_group_of_duplicate(dupe)
         r = g.ref
         if not g.switch_ref(dupe):
@@ -410,10 +396,10 @@ class Results(Markable):
         """
         if not self.__dupes:
             self.__get_dupe_list()
-        keyfunc = lambda d: self.app._get_dupe_sort_key(
-            d, lambda: self.get_group_of_duplicate(d), key, delta
-        )
-        self.__dupes.sort(key=keyfunc, reverse=not asc)
+        self.__dupes.sort(
+            key=lambda d: self.app._get_dupe_sort_key(d, lambda: self.get_group_of_duplicate(d), key, delta),
+            reverse=not asc,
+        )
         self.__dupes_sort_descriptor = (key, asc, delta)

     def sort_groups(self, key, asc=True):
@@ -424,8 +410,7 @@ class Results(Markable):
         :param str key: key attribute name to sort with.
         :param bool asc: If false, sorting is reversed.
         """
-        keyfunc = lambda g: self.app._get_group_sort_key(g, key)
-        self.groups.sort(key=keyfunc, reverse=not asc)
+        self.groups.sort(key=lambda g: self.app._get_group_sort_key(g, key), reverse=not asc)
         self.__groups_sort_descriptor = (key, asc)

     # ---Properties
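Note: the new refresh_required flag gives callers a way to invalidate the cached __dupes list without setting it to None, and the sort_dupes/sort_groups rewrites are behavior-preserving: the same list.sort call with key= and reverse=not asc, with the named lambda inlined. A standalone equivalent of that sort idiom:

```python
# Self-contained equivalent of the inlined sort: key callable plus reverse flag.
rows = [{"name": "b", "size": 3}, {"name": "a", "size": 9}]
asc = False  # descending, as when a column header is toggled
rows.sort(key=lambda r: r["size"], reverse=not asc)
print([r["name"] for r in rows])  # ['a', 'b'] -- largest first
```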
@@ -21,16 +21,16 @@ from . import engine


 class ScanType:
-    Filename = 0
-    Fields = 1
-    FieldsNoOrder = 2
-    Tag = 3
-    Folders = 4
-    Contents = 5
+    FILENAME = 0
+    FIELDS = 1
+    FIELDSNOORDER = 2
+    TAG = 3
+    FOLDERS = 4
+    CONTENTS = 5

     # PE
-    FuzzyBlock = 10
-    ExifTimestamp = 11
+    FUZZYBLOCK = 10
+    EXIFTIMESTAMP = 11


 ScanOption = namedtuple("ScanOption", "scan_type label")
@@ -78,29 +78,29 @@ class Scanner:

     def _getmatches(self, files, j):
         if self.size_threshold or self.scan_type in {
-            ScanType.Contents,
-            ScanType.Folders,
+            ScanType.CONTENTS,
+            ScanType.FOLDERS,
         }:
             j = j.start_subjob([2, 8])
             for f in j.iter_with_progress(files, tr("Read size of %d/%d files")):
                 f.size  # pre-read, makes a smoother progress if read here (especially for bundles)
         if self.size_threshold:
             files = [f for f in files if f.size >= self.size_threshold]
-        if self.scan_type in {ScanType.Contents, ScanType.Folders}:
-            return engine.getmatches_by_contents(files, j=j)
+        if self.scan_type in {ScanType.CONTENTS, ScanType.FOLDERS}:
+            return engine.getmatches_by_contents(files, bigsize=self.big_file_size_threshold, j=j)
         else:
             j = j.start_subjob([2, 8])
             kw = {}
             kw["match_similar_words"] = self.match_similar_words
             kw["weight_words"] = self.word_weighting
             kw["min_match_percentage"] = self.min_match_percentage
-            if self.scan_type == ScanType.FieldsNoOrder:
-                self.scan_type = ScanType.Fields
+            if self.scan_type == ScanType.FIELDSNOORDER:
+                self.scan_type = ScanType.FIELDS
                 kw["no_field_order"] = True
             func = {
-                ScanType.Filename: lambda f: engine.getwords(rem_file_ext(f.name)),
-                ScanType.Fields: lambda f: engine.getfields(rem_file_ext(f.name)),
-                ScanType.Tag: lambda f: [
+                ScanType.FILENAME: lambda f: engine.getwords(rem_file_ext(f.name)),
+                ScanType.FIELDS: lambda f: engine.getfields(rem_file_ext(f.name)),
+                ScanType.TAG: lambda f: [
                     engine.getwords(str(getattr(f, attrname)))
                     for attrname in SCANNABLE_TAGS
                     if attrname in self.scanned_tags
@@ -150,7 +150,7 @@ class Scanner:
         # "duplicated duplicates if you will). Then, we also don't want mixed file kinds if the
         # option isn't enabled, we want matches for which both files exist and, lastly, we don't
         # want matches with both files as ref.
-        if self.scan_type == ScanType.Folders and matches:
+        if self.scan_type == ScanType.FOLDERS and matches:
             allpath = {m.first.path for m in matches}
             allpath |= {m.second.path for m in matches}
             sortedpaths = sorted(allpath)
@@ -161,38 +161,22 @@ class Scanner:
                     toremove.add(p)
                 else:
                     last_parent_path = p
-            matches = [
-                m
-                for m in matches
-                if m.first.path not in toremove or m.second.path not in toremove
-            ]
+            matches = [m for m in matches if m.first.path not in toremove or m.second.path not in toremove]
         if not self.mix_file_kind:
-            matches = [
-                m
-                for m in matches
-                if get_file_ext(m.first.name) == get_file_ext(m.second.name)
-            ]
-        matches = [
-            m for m in matches if m.first.path.exists() and m.second.path.exists()
-        ]
+            matches = [m for m in matches if get_file_ext(m.first.name) == get_file_ext(m.second.name)]
+        matches = [m for m in matches if m.first.path.exists() and m.second.path.exists()]
         matches = [m for m in matches if not (m.first.is_ref and m.second.is_ref)]
         if ignore_list:
-            matches = [
-                m
-                for m in matches
-                if not ignore_list.AreIgnored(str(m.first.path), str(m.second.path))
-            ]
+            matches = [m for m in matches if not ignore_list.are_ignored(str(m.first.path), str(m.second.path))]
         logging.info("Grouping matches")
         groups = engine.get_groups(matches)
         if self.scan_type in {
-            ScanType.Filename,
-            ScanType.Fields,
-            ScanType.FieldsNoOrder,
-            ScanType.Tag,
+            ScanType.FILENAME,
+            ScanType.FIELDS,
+            ScanType.FIELDSNOORDER,
+            ScanType.TAG,
         }:
-            matched_files = dedupe(
-                [m.first for m in matches] + [m.second for m in matches]
-            )
+            matched_files = dedupe([m.first for m in matches] + [m.second for m in matches])
             self.discarded_file_count = len(matched_files) - sum(len(g) for g in groups)
         else:
             # Ticket #195
@@ -215,7 +199,8 @@ class Scanner:
     match_similar_words = False
     min_match_percentage = 80
     mix_file_kind = True
-    scan_type = ScanType.Filename
+    scan_type = ScanType.FILENAME
     scanned_tags = {"artist", "title"}
     size_threshold = 0
+    big_file_size_threshold = 0
     word_weighting = False
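Note: the constant renames are value-preserving (FILENAME is still 0, CONTENTS still 5, and so on), so anything that persisted the integer scan_type keeps working. A quick self-contained check under that assumption:

```python
# Values copied from the ScanType hunk above; only the attribute names changed.
class ScanType:
    FILENAME = 0
    FIELDS = 1
    FIELDSNOORDER = 2
    TAG = 3
    FOLDERS = 4
    CONTENTS = 5
    FUZZYBLOCK = 10
    EXIFTIMESTAMP = 11


assert ScanType.CONTENTS == 5  # same wire value as the old ScanType.Contents
```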
@@ -13,7 +13,7 @@ class ScannerSE(ScannerBase):
     @staticmethod
     def get_scan_options():
         return [
-            ScanOption(ScanType.Filename, tr("Filename")),
-            ScanOption(ScanType.Contents, tr("Contents")),
-            ScanOption(ScanType.Folders, tr("Folders")),
+            ScanOption(ScanType.FILENAME, tr("Filename")),
+            ScanOption(ScanType.CONTENTS, tr("Contents")),
+            ScanOption(ScanType.FOLDERS, tr("Folders")),
         ]
@@ -23,15 +23,13 @@ from ..scanner import ScanType

 def add_fake_files_to_directories(directories, files):
     directories.get_files = lambda j=None: iter(files)
-    directories._dirs.append("this is just so Scan() doesnt return 3")
+    directories._dirs.append("this is just so Scan() doesn't return 3")


 class TestCaseDupeGuru:
     def test_apply_filter_calls_results_apply_filter(self, monkeypatch):
         dgapp = TestApp().app
-        monkeypatch.setattr(
-            dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter)
-        )
+        monkeypatch.setattr(dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter))
         dgapp.apply_filter("foo")
         eq_(2, len(dgapp.results.apply_filter.calls))
         call = dgapp.results.apply_filter.calls[0]
@@ -41,15 +39,11 @@ class TestCaseDupeGuru:

     def test_apply_filter_escapes_regexp(self, monkeypatch):
         dgapp = TestApp().app
-        monkeypatch.setattr(
-            dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter)
-        )
+        monkeypatch.setattr(dgapp.results, "apply_filter", log_calls(dgapp.results.apply_filter))
         dgapp.apply_filter("()[]\\.|+?^abc")
         call = dgapp.results.apply_filter.calls[1]
         eq_("\\(\\)\\[\\]\\\\\\.\\|\\+\\?\\^abc", call["filter_str"])
-        dgapp.apply_filter(
-            "(*)"
-        )  # In "simple mode", we want the * to behave as a wilcard
+        dgapp.apply_filter("(*)")  # In "simple mode", we want the * to behave as a wildcard
         call = dgapp.results.apply_filter.calls[3]
         eq_(r"\(.*\)", call["filter_str"])
         dgapp.options["escape_filter_regexp"] = False
@@ -70,9 +64,7 @@ class TestCaseDupeGuru:
         )
         # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
         monkeypatch.setattr(app, "smart_copy", hscommon.conflict.smart_copy)
-        monkeypatch.setattr(
-            os, "makedirs", lambda path: None
-        )  # We don't want the test to create that fake directory
+        monkeypatch.setattr(os, "makedirs", lambda path: None)  # We don't want the test to create that fake directory
         dgapp = TestApp().app
         dgapp.directories.add_path(p)
         [f] = dgapp.directories.get_files()
@@ -96,14 +88,14 @@ class TestCaseDupeGuru:
         eq_(1, len(calls))
         eq_(sourcepath, calls[0]["path"])

-    def test_Scan_with_objects_evaluating_to_false(self):
+    def test_scan_with_objects_evaluating_to_false(self):
         class FakeFile(fs.File):
             def __bool__(self):
                 return False

         # At some point, any() was used in a wrong way that made Scan() wrongly return 1
         app = TestApp().app
-        f1, f2 = [FakeFile("foo") for i in range(2)]
+        f1, f2 = [FakeFile("foo") for _ in range(2)]
         f1.is_ref, f2.is_ref = (False, False)
         assert not (bool(f1) and bool(f2))
         add_fake_files_to_directories(app.directories, [f1, f2])
|
|||||||
os.link(str(tmppath["myfile"]), str(tmppath["hardlink"]))
|
os.link(str(tmppath["myfile"]), str(tmppath["hardlink"]))
|
||||||
app = TestApp().app
|
app = TestApp().app
|
||||||
app.directories.add_path(tmppath)
|
app.directories.add_path(tmppath)
|
||||||
app.options["scan_type"] = ScanType.Contents
|
app.options["scan_type"] = ScanType.CONTENTS
|
||||||
app.options["ignore_hardlink_matches"] = True
|
app.options["ignore_hardlink_matches"] = True
|
||||||
app.start_scanning()
|
app.start_scanning()
|
||||||
eq_(len(app.results.groups), 0)
|
eq_(len(app.results.groups), 0)
|
||||||
@@ -132,7 +124,7 @@ class TestCaseDupeGuru:
|
|||||||
assert not dgapp.result_table.rename_selected("foo") # no crash
|
assert not dgapp.result_table.rename_selected("foo") # no crash
|
||||||
|
|
||||||
|
|
||||||
class TestCaseDupeGuru_clean_empty_dirs:
|
class TestCaseDupeGuruCleanEmptyDirs:
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def do_setup(self, request):
|
def do_setup(self, request):
|
||||||
monkeypatch = request.getfixturevalue("monkeypatch")
|
monkeypatch = request.getfixturevalue("monkeypatch")
|
||||||
@@ -192,7 +184,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
tmppath["bar"].mkdir()
|
tmppath["bar"].mkdir()
|
||||||
self.app.directories.add_path(tmppath)
|
self.app.directories.add_path(tmppath)
|
||||||
|
|
||||||
def test_GetObjects(self, do_setup):
|
def test_get_objects(self, do_setup):
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
groups = self.groups
|
groups = self.groups
|
||||||
r = self.rtable[0]
|
r = self.rtable[0]
|
||||||
@@ -205,7 +197,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
assert r._group is groups[1]
|
assert r._group is groups[1]
|
||||||
assert r._dupe is objects[4]
|
assert r._dupe is objects[4]
|
||||||
|
|
||||||
def test_GetObjects_after_sort(self, do_setup):
|
def test_get_objects_after_sort(self, do_setup):
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
groups = self.groups[:] # we need an un-sorted reference
|
groups = self.groups[:] # we need an un-sorted reference
|
||||||
self.rtable.sort("name", False)
|
self.rtable.sort("name", False)
|
||||||
@@ -220,7 +212,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
# The first 2 dupes have been removed. The 3rd one is a ref. it stays there, in first pos.
|
# The first 2 dupes have been removed. The 3rd one is a ref. it stays there, in first pos.
|
||||||
eq_(self.rtable.selected_indexes, [1]) # no exception
|
eq_(self.rtable.selected_indexes, [1]) # no exception
|
||||||
|
|
||||||
def test_selectResultNodePaths(self, do_setup):
|
def test_select_result_node_paths(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
self.rtable.select([1, 2])
|
self.rtable.select([1, 2])
|
||||||
@@ -228,7 +220,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
assert app.selected_dupes[0] is objects[1]
|
assert app.selected_dupes[0] is objects[1]
|
||||||
assert app.selected_dupes[1] is objects[2]
|
assert app.selected_dupes[1] is objects[2]
|
||||||
|
|
||||||
def test_selectResultNodePaths_with_ref(self, do_setup):
|
def test_select_result_node_paths_with_ref(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
self.rtable.select([1, 2, 3])
|
self.rtable.select([1, 2, 3])
|
||||||
@@ -237,7 +229,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
assert app.selected_dupes[1] is objects[2]
|
assert app.selected_dupes[1] is objects[2]
|
||||||
assert app.selected_dupes[2] is self.groups[1].ref
|
assert app.selected_dupes[2] is self.groups[1].ref
|
||||||
|
|
||||||
def test_selectResultNodePaths_after_sort(self, do_setup):
|
def test_select_result_node_paths_after_sort(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
groups = self.groups[:] # To keep the old order in memory
|
groups = self.groups[:] # To keep the old order in memory
|
||||||
@@ -264,7 +256,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
app.remove_selected()
|
app.remove_selected()
|
||||||
eq_(self.rtable.selected_indexes, []) # no exception
|
eq_(self.rtable.selected_indexes, []) # no exception
|
||||||
|
|
||||||
def test_selectPowerMarkerRows_after_sort(self, do_setup):
|
def test_select_powermarker_rows_after_sort(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
self.rtable.power_marker = True
|
self.rtable.power_marker = True
|
||||||
@@ -303,7 +295,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
app.toggle_selected_mark_state()
|
app.toggle_selected_mark_state()
|
||||||
eq_(app.results.mark_count, 0)
|
eq_(app.results.mark_count, 0)
|
||||||
|
|
||||||
def test_refreshDetailsWithSelected(self, do_setup):
|
def test_refresh_details_with_selected(self, do_setup):
|
||||||
self.rtable.select([1, 4])
|
self.rtable.select([1, 4])
|
||||||
eq_(self.dpanel.row(0), ("Filename", "bar bleh", "foo bar"))
|
eq_(self.dpanel.row(0), ("Filename", "bar bleh", "foo bar"))
|
||||||
self.dpanel.view.check_gui_calls(["refresh"])
|
self.dpanel.view.check_gui_calls(["refresh"])
|
||||||
@@ -311,7 +303,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
eq_(self.dpanel.row(0), ("Filename", "---", "---"))
|
eq_(self.dpanel.row(0), ("Filename", "---", "---"))
|
||||||
self.dpanel.view.check_gui_calls(["refresh"])
|
self.dpanel.view.check_gui_calls(["refresh"])
|
||||||
|
|
||||||
def test_makeSelectedReference(self, do_setup):
|
def test_make_selected_reference(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
groups = self.groups
|
groups = self.groups
|
||||||
@@ -320,9 +312,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
assert groups[0].ref is objects[1]
|
assert groups[0].ref is objects[1]
|
||||||
assert groups[1].ref is objects[4]
|
assert groups[1].ref is objects[4]
|
||||||
|
|
||||||
def test_makeSelectedReference_by_selecting_two_dupes_in_the_same_group(
|
def test_make_selected_reference_by_selecting_two_dupes_in_the_same_group(self, do_setup):
|
||||||
self, do_setup
|
|
||||||
):
|
|
||||||
app = self.app
|
app = self.app
|
||||||
objects = self.objects
|
objects = self.objects
|
||||||
groups = self.groups
|
groups = self.groups
|
||||||
@@ -332,7 +322,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
assert groups[0].ref is objects[1]
|
assert groups[0].ref is objects[1]
|
||||||
assert groups[1].ref is objects[4]
|
assert groups[1].ref is objects[4]
|
||||||
|
|
||||||
def test_removeSelected(self, do_setup):
|
def test_remove_selected(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
self.rtable.select([1, 4])
|
self.rtable.select([1, 4])
|
||||||
app.remove_selected()
|
app.remove_selected()
|
||||||
@@ -340,7 +330,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
app.remove_selected()
|
app.remove_selected()
|
||||||
eq_(len(app.results.dupes), 0)
|
eq_(len(app.results.dupes), 0)
|
||||||
|
|
||||||
def test_addDirectory_simple(self, do_setup):
|
def test_add_directory_simple(self, do_setup):
|
||||||
# There's already a directory in self.app, so adding another once makes 2 of em
|
# There's already a directory in self.app, so adding another once makes 2 of em
|
||||||
app = self.app
|
app = self.app
|
||||||
# any other path that isn't a parent or child of the already added path
|
# any other path that isn't a parent or child of the already added path
|
||||||
@@ -348,7 +338,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
app.add_directory(otherpath)
|
app.add_directory(otherpath)
|
||||||
eq_(len(app.directories), 2)
|
eq_(len(app.directories), 2)
|
||||||
|
|
||||||
def test_addDirectory_already_there(self, do_setup):
|
def test_add_directory_already_there(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
otherpath = Path(op.dirname(__file__))
|
otherpath = Path(op.dirname(__file__))
|
||||||
app.add_directory(otherpath)
|
app.add_directory(otherpath)
|
||||||
@@ -356,7 +346,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
eq_(len(app.view.messages), 1)
|
eq_(len(app.view.messages), 1)
|
||||||
assert "already" in app.view.messages[0]
|
assert "already" in app.view.messages[0]
|
||||||
|
|
||||||
def test_addDirectory_does_not_exist(self, do_setup):
|
def test_add_directory_does_not_exist(self, do_setup):
|
||||||
app = self.app
|
app = self.app
|
||||||
app.add_directory("/does_not_exist")
|
app.add_directory("/does_not_exist")
|
||||||
eq_(len(app.view.messages), 1)
|
eq_(len(app.view.messages), 1)
|
||||||
@@ -372,30 +362,30 @@ class TestCaseDupeGuruWithResults:
|
|||||||
# BOTH the ref and the other dupe should have been added
|
# BOTH the ref and the other dupe should have been added
|
||||||
eq_(len(app.ignore_list), 3)
|
eq_(len(app.ignore_list), 3)
|
||||||
|
|
||||||
def test_purgeIgnoreList(self, do_setup, tmpdir):
|
def test_purge_ignorelist(self, do_setup, tmpdir):
|
||||||
app = self.app
|
app = self.app
|
||||||
p1 = str(tmpdir.join("file1"))
|
p1 = str(tmpdir.join("file1"))
|
||||||
p2 = str(tmpdir.join("file2"))
|
p2 = str(tmpdir.join("file2"))
|
||||||
open(p1, "w").close()
|
open(p1, "w").close()
|
||||||
open(p2, "w").close()
|
open(p2, "w").close()
|
||||||
dne = "/does_not_exist"
|
dne = "/does_not_exist"
|
||||||
app.ignore_list.Ignore(dne, p1)
|
app.ignore_list.ignore(dne, p1)
|
||||||
app.ignore_list.Ignore(p2, dne)
|
app.ignore_list.ignore(p2, dne)
|
||||||
app.ignore_list.Ignore(p1, p2)
|
app.ignore_list.ignore(p1, p2)
|
||||||
app.purge_ignore_list()
|
app.purge_ignore_list()
|
||||||
eq_(1, len(app.ignore_list))
|
eq_(1, len(app.ignore_list))
|
||||||
assert app.ignore_list.AreIgnored(p1, p2)
|
assert app.ignore_list.are_ignored(p1, p2)
|
||||||
assert not app.ignore_list.AreIgnored(dne, p1)
|
assert not app.ignore_list.are_ignored(dne, p1)
|
||||||
|
|
||||||
def test_only_unicode_is_added_to_ignore_list(self, do_setup):
|
def test_only_unicode_is_added_to_ignore_list(self, do_setup):
|
||||||
def FakeIgnore(first, second):
|
def fake_ignore(first, second):
|
||||||
if not isinstance(first, str):
|
if not isinstance(first, str):
|
||||||
self.fail()
|
self.fail()
|
||||||
if not isinstance(second, str):
|
if not isinstance(second, str):
|
||||||
self.fail()
|
self.fail()
|
||||||
|
|
||||||
app = self.app
|
app = self.app
|
||||||
app.ignore_list.Ignore = FakeIgnore
|
app.ignore_list.ignore = fake_ignore
|
||||||
self.rtable.select([4])
|
self.rtable.select([4])
|
||||||
app.add_selected_to_ignore_list()
|
app.add_selected_to_ignore_list()
|
||||||
|
|
||||||
@@ -404,9 +394,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
# results table.
|
# results table.
|
||||||
app = self.app
|
app = self.app
|
||||||
app.JOB = Job(1, lambda *args, **kw: False) # Cancels the task
|
app.JOB = Job(1, lambda *args, **kw: False) # Cancels the task
|
||||||
add_fake_files_to_directories(
|
add_fake_files_to_directories(app.directories, self.objects) # We want the scan to at least start
|
||||||
app.directories, self.objects
|
|
||||||
) # We want the scan to at least start
|
|
||||||
app.start_scanning() # will be cancelled immediately
|
app.start_scanning() # will be cancelled immediately
|
||||||
eq_(len(app.result_table), 0)
|
eq_(len(app.result_table), 0)
|
||||||
|
|
||||||
@@ -431,7 +419,7 @@ class TestCaseDupeGuruWithResults:
|
|||||||
# don't crash
|
# don't crash
|
||||||
|
|
||||||
|
|
||||||
class TestCaseDupeGuru_renameSelected:
|
class TestCaseDupeGuruRenameSelected:
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def do_setup(self, request):
|
def do_setup(self, request):
|
||||||
tmpdir = request.getfixturevalue("tmpdir")
|
tmpdir = request.getfixturevalue("tmpdir")
|
||||||
@@ -514,7 +502,6 @@ class TestAppWithDirectoriesInTree:
|
|||||||
# refreshed.
|
# refreshed.
|
||||||
node = self.dtree[0]
|
node = self.dtree[0]
|
||||||
eq_(len(node), 3) # a len() call is required for subnodes to be loaded
|
eq_(len(node), 3) # a len() call is required for subnodes to be loaded
|
||||||
subnode = node[0]
|
|
||||||
node.state = 1 # the state property is a state index
|
node.state = 1 # the state property is a state index
|
||||||
node = self.dtree[0]
|
node = self.dtree[0]
|
||||||
eq_(len(node), 3)
|
eq_(len(node), 3)
|
||||||
|
|||||||
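Side note on the test_purge_ignorelist hunk above: the test seeds three ignore pairs, two of which reference a path that does not exist, and expects purge_ignore_list() to keep only (p1, p2). A minimal sketch of that contract, assuming a plain on-disk existence check (the real IgnoreList implementation may differ):

    import os

    def purge_ignore_list(pairs):
        # Keep only the pairs whose two paths still exist on disk.
        return [(first, second) for first, second in pairs
                if os.path.exists(first) and os.path.exists(second)]
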
@@ -88,6 +88,7 @@ class NamedObject:
         self.size = size
         self.md5partial = name
         self.md5 = name
+        self.md5samples = name
         if with_words:
             self.words = getwords(name)
         self.is_ref = False
@@ -139,9 +140,7 @@ def GetTestGroups():
     matches = engine.getmatches(objects) # we should have 5 matches
     groups = engine.get_groups(matches) # We should have 2 groups
     for g in groups:
-        g.prioritize(
-            lambda x: objects.index(x)
-        ) # We want the dupes to be in the same order as the list is
+        g.prioritize(lambda x: objects.index(x)) # We want the dupes to be in the same order as the list is
     groups.sort(key=len, reverse=True) # We want the group with 3 members to be first.
     return (objects, matches, groups)
 
@@ -152,8 +151,8 @@ class TestApp(TestAppBase):
     def __init__(self):
         def link_gui(gui):
             gui.view = self.make_logger()
-            if hasattr(gui, "columns"): # tables
-                gui.columns.view = self.make_logger()
+            if hasattr(gui, "_columns"): # tables
+                gui._columns.view = self.make_logger()
             return gui
 
         TestAppBase.__init__(self)

@@ -14,9 +14,7 @@ except ImportError:
     skip("Can't import the block module, probably hasn't been compiled.")
 
 
-def my_avgdiff(
-    first, second, limit=768, min_iter=3
-): # this is so I don't have to re-write every call
+def my_avgdiff(first, second, limit=768, min_iter=3): # this is so I don't have to re-write every call
     return avgdiff(first, second, limit, min_iter)
 
 
@@ -75,99 +73,6 @@ class TestCasegetblock:
         eq_((meanred, meangreen, meanblue), b)
 
 
-# class TCdiff(unittest.TestCase):
-#     def test_diff(self):
-#         b1 = (10, 20, 30)
-#         b2 = (1, 2, 3)
-#         eq_(9 + 18 + 27, diff(b1, b2))
-#
-#     def test_diff_negative(self):
-#         b1 = (10, 20, 30)
-#         b2 = (1, 2, 3)
-#         eq_(9 + 18 + 27, diff(b2, b1))
-#
-#     def test_diff_mixed_positive_and_negative(self):
-#         b1 = (1, 5, 10)
-#         b2 = (10, 1, 15)
-#         eq_(9 + 4 + 5, diff(b1, b2))
-#
-
-# class TCgetblocks(unittest.TestCase):
-#     def test_empty_image(self):
-#         im = empty()
-#         blocks = getblocks(im, 1)
-#         eq_(0, len(blocks))
-#
-#     def test_one_block_image(self):
-#         im = four_pixels()
-#         blocks = getblocks2(im, 1)
-#         eq_(1, len(blocks))
-#         block = blocks[0]
-#         meanred = (0xff + 0x80) // 4
-#         meangreen = (0x80 + 0x40) // 4
-#         meanblue = (0xff + 0x80) // 4
-#         eq_((meanred, meangreen, meanblue), block)
-#
-#     def test_not_enough_height_to_fit_a_block(self):
-#         im = FakeImage((2, 1), [BLACK, BLACK])
-#         blocks = getblocks(im, 2)
-#         eq_(0, len(blocks))
-#
-#     def xtest_dont_include_leftovers(self):
-#         # this test is disabled because getblocks is not used and getblock in cdeffed
-#         pixels = [
-#             RED,(0, 0x80, 0xff), BLACK,
-#             (0x80, 0, 0),(0, 0x40, 0x80), BLACK,
-#             BLACK, BLACK, BLACK
-#         ]
-#         im = FakeImage((3, 3), pixels)
-#         blocks = getblocks(im, 2)
-#         block = blocks[0]
-#         #Because the block is smaller than the image, only blocksize must be considered.
-#         meanred = (0xff + 0x80) // 4
-#         meangreen = (0x80 + 0x40) // 4
-#         meanblue = (0xff + 0x80) // 4
-#         eq_((meanred, meangreen, meanblue), block)
-#
-#     def xtest_two_blocks(self):
-#         # this test is disabled because getblocks is not used and getblock in cdeffed
-#         pixels = [BLACK for i in xrange(4 * 2)]
-#         pixels[0] = RED
-#         pixels[1] = (0, 0x80, 0xff)
-#         pixels[4] = (0x80, 0, 0)
-#         pixels[5] = (0, 0x40, 0x80)
-#         im = FakeImage((4, 2), pixels)
-#         blocks = getblocks(im, 2)
-#         eq_(2, len(blocks))
-#         block = blocks[0]
-#         #Because the block is smaller than the image, only blocksize must be considered.
-#         meanred = (0xff + 0x80) // 4
-#         meangreen = (0x80 + 0x40) // 4
-#         meanblue = (0xff + 0x80) // 4
-#         eq_((meanred, meangreen, meanblue), block)
-#         eq_(BLACK, blocks[1])
-#
-#     def test_four_blocks(self):
-#         pixels = [BLACK for i in xrange(4 * 4)]
-#         pixels[0] = RED
-#         pixels[1] = (0, 0x80, 0xff)
-#         pixels[4] = (0x80, 0, 0)
-#         pixels[5] = (0, 0x40, 0x80)
-#         im = FakeImage((4, 4), pixels)
-#         blocks = getblocks2(im, 2)
-#         eq_(4, len(blocks))
-#         block = blocks[0]
-#         #Because the block is smaller than the image, only blocksize must be considered.
-#         meanred = (0xff + 0x80) // 4
-#         meangreen = (0x80 + 0x40) // 4
-#         meanblue = (0xff + 0x80) // 4
-#         eq_((meanred, meangreen, meanblue), block)
-#         eq_(BLACK, blocks[1])
-#         eq_(BLACK, blocks[2])
-#         eq_(BLACK, blocks[3])
-#
-
-
 class TestCasegetblocks2:
     def test_empty_image(self):
         im = empty()
@@ -272,8 +177,8 @@ class TestCaseavgdiff:
     def test_return_at_least_1_at_the_slightest_difference(self):
         ref = (0, 0, 0)
         b1 = (1, 0, 0)
-        blocks1 = [ref for i in range(250)]
-        blocks2 = [ref for i in range(250)]
+        blocks1 = [ref for _ in range(250)]
+        blocks2 = [ref for _ in range(250)]
         blocks2[0] = b1
         eq_(1, my_avgdiff(blocks1, blocks2))
@@ -282,41 +187,3 @@ class TestCaseavgdiff:
         blocks1 = [ref, ref]
         blocks2 = [ref, ref]
         eq_(0, my_avgdiff(blocks1, blocks2))
-
-
-# class TCmaxdiff(unittest.TestCase):
-#     def test_empty(self):
-#         self.assertRaises(NoBlocksError, maxdiff,[],[])
-#
-#     def test_two_blocks(self):
-#         b1 = (5, 10, 15)
-#         b2 = (255, 250, 245)
-#         b3 = (0, 0, 0)
-#         b4 = (255, 0, 255)
-#         blocks1 = [b1, b2]
-#         blocks2 = [b3, b4]
-#         expected1 = 5 + 10 + 15
-#         expected2 = 0 + 250 + 10
-#         expected = max(expected1, expected2)
-#         eq_(expected, maxdiff(blocks1, blocks2))
-#
-#     def test_blocks_not_the_same_size(self):
-#         b = (0, 0, 0)
-#         self.assertRaises(DifferentBlockCountError, maxdiff,[b, b],[b])
-#
-#     def test_first_arg_is_empty_but_not_second(self):
-#         #Don't return 0 (as when the 2 lists are empty), raise!
-#         b = (0, 0, 0)
-#         self.assertRaises(DifferentBlockCountError, maxdiff,[],[b])
-#
-#     def test_limit(self):
-#         b1 = (5, 10, 15)
-#         b2 = (255, 250, 245)
-#         b3 = (0, 0, 0)
-#         b4 = (255, 0, 255)
-#         blocks1 = [b1, b2]
-#         blocks2 = [b3, b4]
-#         expected1 = 5 + 10 + 15
-#         expected2 = 0 + 250 + 10
-#         eq_(expected1, maxdiff(blocks1, blocks2, expected1 - 1))
-#
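The avgdiff hunks above pin down its observable contract: identical block lists give 0, any difference at all gives at least 1, and (per the commented-out tests being deleted) empty or mismatched block lists raise. A rough pure-Python model under those assumptions — the real avgdiff is a compiled C function whose limit and min_iter parameters this sketch ignores:

    def avgdiff_model(blocks1, blocks2):
        # Each block is an (r, g, b) tuple; a block diff is the sum of
        # absolute per-channel deltas, averaged over all blocks.
        if len(blocks1) != len(blocks2):
            raise ValueError("different block counts")  # real code: DifferentBlockCountError
        if not blocks1:
            raise ValueError("no blocks")  # real code: NoBlocksError
        total = sum(abs(c1 - c2) for b1, b2 in zip(blocks1, blocks2)
                    for c1, c2 in zip(b1, b2))
        return max(total // len(blocks1), 1) if total else 0
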
@@ -17,7 +17,7 @@ except ImportError:
     skip("Can't import the cache module, probably hasn't been compiled.")
 
 
-class TestCasecolors_to_string:
+class TestCaseColorsToString:
     def test_no_color(self):
         eq_("", colors_to_string([]))
 
@@ -30,7 +30,7 @@ class TestCasecolors_to_string:
         eq_("000102030405", colors_to_string([(0, 1, 2), (3, 4, 5)]))
 
 
-class TestCasestring_to_colors:
+class TestCaseStringToColors:
     def test_empty(self):
         eq_([], string_to_colors(""))
 
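The two renamed cache test classes fix the color string format: colors_to_string packs each (r, g, b) tuple as six hex digits, and string_to_colors inverts it. A minimal round-trip sketch consistent with the "000102030405" expectation in the tests (the hex-digit packing is what the tests show; the real module is compiled):

    def colors_to_string(colors):
        # [(0, 1, 2), (3, 4, 5)] -> "000102030405"
        return "".join(f"{r:02x}{g:02x}{b:02x}" for r, g, b in colors)

    def string_to_colors(s):
        # Inverse of colors_to_string; "" -> []
        return [tuple(int(s[i + j:i + j + 2], 16) for j in (0, 2, 4))
                for i in range(0, len(s) - 5, 6)]
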
@@ -92,7 +92,7 @@ def test_add_path():
     assert p in d
 
 
-def test_AddPath_when_path_is_already_there():
+def test_add_path_when_path_is_already_there():
     d = Directories()
     p = testpath["onefile"]
     d.add_path(p)
@@ -112,7 +112,7 @@ def test_add_path_containing_paths_already_there():
     eq_(d[0], testpath)
 
 
-def test_AddPath_non_latin(tmpdir):
+def test_add_path_non_latin(tmpdir):
     p = Path(str(tmpdir))
     to_add = p["unicode\u201a"]
     os.mkdir(str(to_add))
@@ -140,20 +140,20 @@ def test_states():
     d = Directories()
     p = testpath["onefile"]
     d.add_path(p)
-    eq_(DirectoryState.Normal, d.get_state(p))
-    d.set_state(p, DirectoryState.Reference)
-    eq_(DirectoryState.Reference, d.get_state(p))
-    eq_(DirectoryState.Reference, d.get_state(p["dir1"]))
+    eq_(DirectoryState.NORMAL, d.get_state(p))
+    d.set_state(p, DirectoryState.REFERENCE)
+    eq_(DirectoryState.REFERENCE, d.get_state(p))
+    eq_(DirectoryState.REFERENCE, d.get_state(p["dir1"]))
     eq_(1, len(d.states))
     eq_(p, list(d.states.keys())[0])
-    eq_(DirectoryState.Reference, d.states[p])
+    eq_(DirectoryState.REFERENCE, d.states[p])
 
 
 def test_get_state_with_path_not_there():
     # When the path's not there, just return DirectoryState.Normal
     d = Directories()
     d.add_path(testpath["onefile"])
-    eq_(d.get_state(testpath), DirectoryState.Normal)
+    eq_(d.get_state(testpath), DirectoryState.NORMAL)
 
 
 def test_states_overwritten_when_larger_directory_eat_smaller_ones():
@@ -162,20 +162,20 @@ def test_states_overwritten_when_larger_directory_eat_smaller_ones():
     d = Directories()
     p = testpath["onefile"]
     d.add_path(p)
-    d.set_state(p, DirectoryState.Excluded)
+    d.set_state(p, DirectoryState.EXCLUDED)
     d.add_path(testpath)
-    d.set_state(testpath, DirectoryState.Reference)
-    eq_(d.get_state(p), DirectoryState.Reference)
-    eq_(d.get_state(p["dir1"]), DirectoryState.Reference)
-    eq_(d.get_state(testpath), DirectoryState.Reference)
+    d.set_state(testpath, DirectoryState.REFERENCE)
+    eq_(d.get_state(p), DirectoryState.REFERENCE)
+    eq_(d.get_state(p["dir1"]), DirectoryState.REFERENCE)
+    eq_(d.get_state(testpath), DirectoryState.REFERENCE)
 
 
 def test_get_files():
     d = Directories()
     p = testpath["fs"]
     d.add_path(p)
-    d.set_state(p["dir1"], DirectoryState.Reference)
-    d.set_state(p["dir2"], DirectoryState.Excluded)
+    d.set_state(p["dir1"], DirectoryState.REFERENCE)
+    d.set_state(p["dir2"], DirectoryState.EXCLUDED)
     files = list(d.get_files())
     eq_(5, len(files))
     for f in files:
@@ -204,8 +204,8 @@ def test_get_folders():
     d = Directories()
     p = testpath["fs"]
     d.add_path(p)
-    d.set_state(p["dir1"], DirectoryState.Reference)
-    d.set_state(p["dir2"], DirectoryState.Excluded)
+    d.set_state(p["dir1"], DirectoryState.REFERENCE)
+    d.set_state(p["dir2"], DirectoryState.EXCLUDED)
     folders = list(d.get_folders())
     eq_(len(folders), 3)
     ref = [f for f in folders if f.is_ref]
@@ -220,7 +220,7 @@ def test_get_files_with_inherited_exclusion():
     d = Directories()
     p = testpath["onefile"]
     d.add_path(p)
-    d.set_state(p, DirectoryState.Excluded)
+    d.set_state(p, DirectoryState.EXCLUDED)
     eq_([], list(d.get_files()))
 
 
@@ -233,14 +233,14 @@ def test_save_and_load(tmpdir):
     p2.mkdir()
     d1.add_path(p1)
     d1.add_path(p2)
-    d1.set_state(p1, DirectoryState.Reference)
-    d1.set_state(p1["dir1"], DirectoryState.Excluded)
+    d1.set_state(p1, DirectoryState.REFERENCE)
+    d1.set_state(p1["dir1"], DirectoryState.EXCLUDED)
     tmpxml = str(tmpdir.join("directories_testunit.xml"))
     d1.save_to_file(tmpxml)
     d2.load_from_file(tmpxml)
     eq_(2, len(d2))
-    eq_(DirectoryState.Reference, d2.get_state(p1))
-    eq_(DirectoryState.Excluded, d2.get_state(p1["dir1"]))
+    eq_(DirectoryState.REFERENCE, d2.get_state(p1))
+    eq_(DirectoryState.EXCLUDED, d2.get_state(p1["dir1"]))
 
 
 def test_invalid_path():
@@ -254,7 +254,12 @@ def test_invalid_path():
 def test_set_state_on_invalid_path():
     d = Directories()
     try:
-        d.set_state(Path("foobar",), DirectoryState.Normal)
+        d.set_state(
+            Path(
+                "foobar",
+            ),
+            DirectoryState.NORMAL,
+        )
     except LookupError:
         assert False
 
@@ -282,7 +287,7 @@ def test_unicode_save(tmpdir):
     p1.mkdir()
     p1["foo\xe9"].mkdir()
     d.add_path(p1)
-    d.set_state(p1["foo\xe9"], DirectoryState.Excluded)
+    d.set_state(p1["foo\xe9"], DirectoryState.EXCLUDED)
     tmpxml = str(tmpdir.join("directories_testunit.xml"))
     try:
         d.save_to_file(tmpxml)
@@ -316,10 +321,10 @@ def test_get_state_returns_excluded_by_default_for_hidden_directories(tmpdir):
     hidden_dir_path = p[".foo"]
     p[".foo"].mkdir()
     d.add_path(p)
-    eq_(d.get_state(hidden_dir_path), DirectoryState.Excluded)
+    eq_(d.get_state(hidden_dir_path), DirectoryState.EXCLUDED)
     # But it can be overriden
-    d.set_state(hidden_dir_path, DirectoryState.Normal)
-    eq_(d.get_state(hidden_dir_path), DirectoryState.Normal)
+    d.set_state(hidden_dir_path, DirectoryState.NORMAL)
+    eq_(d.get_state(hidden_dir_path), DirectoryState.NORMAL)
 
 
 def test_default_path_state_override(tmpdir):
@@ -327,7 +332,7 @@ def test_default_path_state_override(tmpdir):
     class MyDirectories(Directories):
         def _default_state_for_path(self, path):
             if "foobar" in path:
-                return DirectoryState.Excluded
+                return DirectoryState.EXCLUDED
 
     d = MyDirectories()
     p1 = Path(str(tmpdir))
@@ -336,24 +341,26 @@ def test_default_path_state_override(tmpdir):
     p1["foobaz"].mkdir()
     p1["foobaz/somefile"].open("w").close()
     d.add_path(p1)
-    eq_(d.get_state(p1["foobaz"]), DirectoryState.Normal)
-    eq_(d.get_state(p1["foobar"]), DirectoryState.Excluded)
+    eq_(d.get_state(p1["foobaz"]), DirectoryState.NORMAL)
+    eq_(d.get_state(p1["foobar"]), DirectoryState.EXCLUDED)
     eq_(len(list(d.get_files())), 1) # only the 'foobaz' file is there
     # However, the default state can be changed
-    d.set_state(p1["foobar"], DirectoryState.Normal)
-    eq_(d.get_state(p1["foobar"]), DirectoryState.Normal)
+    d.set_state(p1["foobar"], DirectoryState.NORMAL)
+    eq_(d.get_state(p1["foobar"]), DirectoryState.NORMAL)
     eq_(len(list(d.get_files())), 2)
 
 
-class TestExcludeList():
+class TestExcludeList:
     def setup_method(self, method):
         self.d = Directories(exclude_list=ExcludeList(union_regex=False))
 
     def get_files_and_expect_num_result(self, num_result):
         """Calls get_files(), get the filenames only, print for debugging.
         num_result is how many files are expected as a result."""
-        print(f"EXCLUDED REGEX: paths {self.d._exclude_list.compiled_paths} \
-files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled}")
+        print(
+            f"EXCLUDED REGEX: paths {self.d._exclude_list.compiled_paths} \
+files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled}"
+        )
         files = list(self.d.get_files())
         files = [file.name for file in files]
         print(f"FINAL FILES {files}")
@@ -368,11 +375,11 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         p1["$Recycle.Bin"].mkdir()
         p1["$Recycle.Bin"]["subdir"].mkdir()
         self.d.add_path(p1)
-        eq_(self.d.get_state(p1["$Recycle.Bin"]), DirectoryState.Excluded)
-        # By default, subdirs should be excluded too, but this can be overriden separately
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Excluded)
-        self.d.set_state(p1["$Recycle.Bin"]["subdir"], DirectoryState.Normal)
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Normal)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]), DirectoryState.EXCLUDED)
+        # By default, subdirs should be excluded too, but this can be overridden separately
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.EXCLUDED)
+        self.d.set_state(p1["$Recycle.Bin"]["subdir"], DirectoryState.NORMAL)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.NORMAL)
 
     def test_exclude_refined(self, tmpdir):
         regex1 = r"^\$Recycle\.Bin$"
@@ -391,16 +398,16 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         self.d.add_path(p1["$Recycle.Bin"])
 
         # Filter should set the default state to Excluded
-        eq_(self.d.get_state(p1["$Recycle.Bin"]), DirectoryState.Excluded)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]), DirectoryState.EXCLUDED)
         # The subdir should inherit its parent state
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Excluded)
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdar"]), DirectoryState.Excluded)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.EXCLUDED)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdar"]), DirectoryState.EXCLUDED)
         # Override a child path's state
-        self.d.set_state(p1["$Recycle.Bin"]["subdir"], DirectoryState.Normal)
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Normal)
+        self.d.set_state(p1["$Recycle.Bin"]["subdir"], DirectoryState.NORMAL)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.NORMAL)
         # Parent should keep its default state, and the other child too
-        eq_(self.d.get_state(p1["$Recycle.Bin"]), DirectoryState.Excluded)
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdar"]), DirectoryState.Excluded)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]), DirectoryState.EXCLUDED)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdar"]), DirectoryState.EXCLUDED)
         # print(f"get_folders(): {[x for x in self.d.get_folders()]}")
 
         # only the 2 files directly under the Normal directory
@@ -412,8 +419,8 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         assert "somesubdirfile.png" in files
         assert "unwanted_subdirfile.gif" in files
         # Overriding the parent should enable all children
-        self.d.set_state(p1["$Recycle.Bin"], DirectoryState.Normal)
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdar"]), DirectoryState.Normal)
+        self.d.set_state(p1["$Recycle.Bin"], DirectoryState.NORMAL)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdar"]), DirectoryState.NORMAL)
         # all files there
         files = self.get_files_and_expect_num_result(6)
         assert "somefile.png" in files
@@ -437,7 +444,7 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         assert self.d._exclude_list.error(regex3) is None
         # print(f"get_folders(): {[x for x in self.d.get_folders()]}")
         # Directory shouldn't change its state here, unless explicitely done by user
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Normal)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.NORMAL)
         files = self.get_files_and_expect_num_result(5)
         assert "unwanted_subdirfile.gif" not in files
         assert "unwanted_subdarfile.png" in files
@@ -447,14 +454,14 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         self.d._exclude_list.rename(regex3, regex4)
         assert self.d._exclude_list.error(regex4) is None
         p1["$Recycle.Bin"]["subdar"]["file_ending_with_subdir"].open("w").close()
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Excluded)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.EXCLUDED)
         files = self.get_files_and_expect_num_result(4)
         assert "file_ending_with_subdir" not in files
         assert "somesubdarfile.jpeg" in files
         assert "somesubdirfile.png" not in files
         assert "unwanted_subdirfile.gif" not in files
-        self.d.set_state(p1["$Recycle.Bin"]["subdir"], DirectoryState.Normal)
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Normal)
+        self.d.set_state(p1["$Recycle.Bin"]["subdir"], DirectoryState.NORMAL)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.NORMAL)
         # print(f"get_folders(): {[x for x in self.d.get_folders()]}")
         files = self.get_files_and_expect_num_result(6)
         assert "file_ending_with_subdir" not in files
@@ -464,7 +471,7 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         regex5 = r".*subdir.*"
         self.d._exclude_list.rename(regex4, regex5)
         # Files containing substring should be filtered
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.Normal)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.NORMAL)
         # The path should not match, only the filename, the "subdir" in the directory name shouldn't matter
         p1["$Recycle.Bin"]["subdir"]["file_which_shouldnt_match"].open("w").close()
         files = self.get_files_and_expect_num_result(5)
@@ -473,6 +480,29 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         assert "file_ending_with_subdir" not in files
         assert "file_which_shouldnt_match" in files
 
+        # This should match the directory only
+        regex6 = r".*/.*subdir.*/.*"
+        if ISWINDOWS:
+            regex6 = r".*\\.*subdir.*\\.*"
+        assert os.sep in regex6
+        self.d._exclude_list.rename(regex5, regex6)
+        self.d._exclude_list.remove(regex1)
+        eq_(len(self.d._exclude_list.compiled), 1)
+        assert regex1 not in self.d._exclude_list
+        assert regex5 not in self.d._exclude_list
+        assert self.d._exclude_list.error(regex6) is None
+        assert regex6 in self.d._exclude_list
+        # This still should not be affected
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["subdir"]), DirectoryState.NORMAL)
+        files = self.get_files_and_expect_num_result(5)
+        # These files are under the "/subdir" directory
+        assert "somesubdirfile.png" not in files
+        assert "unwanted_subdirfile.gif" not in files
+        # This file under "subdar" directory should not be filtered out
+        assert "file_ending_with_subdir" in files
+        # This file is in a directory that should be filtered out
+        assert "file_which_shouldnt_match" not in files
+
     def test_japanese_unicode(self, tmpdir):
         p1 = Path(str(tmpdir))
         p1["$Recycle.Bin"].mkdir()
@@ -488,7 +518,7 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         self.d._exclude_list.add(regex3)
         self.d._exclude_list.mark(regex3)
         # print(f"get_folders(): {[x for x in self.d.get_folders()]}")
-        eq_(self.d.get_state(p1["$Recycle.Bin"]["思叫物語"]), DirectoryState.Excluded)
+        eq_(self.d.get_state(p1["$Recycle.Bin"]["思叫物語"]), DirectoryState.EXCLUDED)
         files = self.get_files_and_expect_num_result(2)
         assert "過去白濁物語~]_カラー.jpg" not in files
         assert "なししろ会う前" not in files
@@ -497,7 +527,7 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         regex4 = r".*物語$"
         self.d._exclude_list.rename(regex3, regex4)
         assert self.d._exclude_list.error(regex4) is None
-        self.d.set_state(p1["$Recycle.Bin"]["思叫物語"], DirectoryState.Normal)
+        self.d.set_state(p1["$Recycle.Bin"]["思叫物語"], DirectoryState.NORMAL)
         files = self.get_files_and_expect_num_result(5)
         assert "過去白濁物語~]_カラー.jpg" in files
         assert "なししろ会う前" in files
@@ -516,8 +546,8 @@ files: {self.d._exclude_list.compiled_files} all: {self.d._exclude_list.compiled
         p1["foobar"][".hidden_dir"][".hidden_subfile.png"].open("w").close()
         self.d.add_path(p1["foobar"])
         # It should not inherit its parent's state originally
-        eq_(self.d.get_state(p1["foobar"][".hidden_dir"]), DirectoryState.Excluded)
-        self.d.set_state(p1["foobar"][".hidden_dir"], DirectoryState.Normal)
+        eq_(self.d.get_state(p1["foobar"][".hidden_dir"]), DirectoryState.EXCLUDED)
+        self.d.set_state(p1["foobar"][".hidden_dir"], DirectoryState.NORMAL)
         # The files should still be filtered
         files = self.get_files_and_expect_num_result(1)
         eq_(len(self.d._exclude_list.compiled_paths), 0)
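Most of the directories hunks are a constants rename (DirectoryState.Normal/Reference/Excluded becoming NORMAL/REFERENCE/EXCLUDED), but the tests also document state inheritance: a path reports the state of its nearest explicitly-set ancestor, and set_state on a child overrides the parent. A compact model of that lookup; the numeric values are an assumption (the app tests only say the state property is an index), and the real Directories class layers exclude lists and hidden-directory defaults on top:

    from pathlib import PurePath

    class DirectoryState:
        NORMAL = 0
        REFERENCE = 1
        EXCLUDED = 2

    def get_state(states: dict, path: PurePath) -> int:
        # The nearest explicitly-set state wins, walking from the path
        # itself up through its ancestors; otherwise NORMAL.
        for candidate in (path, *path.parents):
            if candidate in states:
                return states[candidate]
        return DirectoryState.NORMAL
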
@@ -69,6 +69,10 @@ class TestCasegetwords:
|
|||||||
eq_(["a", "b", "c", "d"], getwords("a b c d"))
|
eq_(["a", "b", "c", "d"], getwords("a b c d"))
|
||||||
eq_(["a", "b", "c", "d"], getwords(" a b c d "))
|
eq_(["a", "b", "c", "d"], getwords(" a b c d "))
|
||||||
|
|
||||||
|
def test_unicode(self):
|
||||||
|
eq_(["e", "c", "0", "a", "o", "u", "e", "u"], getwords("é ç 0 à ö û è ¤ ù"))
|
||||||
|
eq_(["02", "君のこころは輝いてるかい?", "国木田花丸", "solo", "ver"], getwords("02 君のこころは輝いてるかい? 国木田花丸 Solo Ver"))
|
||||||
|
|
||||||
def test_splitter_chars(self):
|
def test_splitter_chars(self):
|
||||||
eq_(
|
eq_(
|
||||||
[chr(i) for i in range(ord("a"), ord("z") + 1)],
|
[chr(i) for i in range(ord("a"), ord("z") + 1)],
|
||||||
@@ -85,7 +89,7 @@ class TestCasegetwords:
|
|||||||
eq_(["foo", "bar"], getwords("FOO BAR"))
|
eq_(["foo", "bar"], getwords("FOO BAR"))
|
||||||
|
|
||||||
def test_decompose_unicode(self):
|
def test_decompose_unicode(self):
|
||||||
eq_(getwords("foo\xe9bar"), ["fooebar"])
|
eq_(["fooebar"], getwords("foo\xe9bar"))
|
||||||
|
|
||||||
|
|
||||||
class TestCasegetfields:
|
class TestCasegetfields:
|
||||||
@@ -99,10 +103,9 @@ class TestCasegetfields:
|
|||||||
expected = [["a", "bc", "def"]]
|
expected = [["a", "bc", "def"]]
|
||||||
actual = getfields(" - a bc def")
|
actual = getfields(" - a bc def")
|
||||||
eq_(expected, actual)
|
eq_(expected, actual)
|
||||||
expected = [["bc", "def"]]
|
|
||||||
|
|
||||||
|
|
||||||
class TestCaseunpack_fields:
|
class TestCaseUnpackFields:
|
||||||
def test_with_fields(self):
|
def test_with_fields(self):
|
||||||
expected = ["a", "b", "c", "d", "e", "f"]
|
expected = ["a", "b", "c", "d", "e", "f"]
|
||||||
actual = unpack_fields([["a"], ["b", "c"], ["d", "e", "f"]])
|
actual = unpack_fields([["a"], ["b", "c"], ["d", "e", "f"]])
|
||||||
@@ -173,9 +176,7 @@ class TestCaseWordCompareWithFields:
|
|||||||
def test_simple(self):
|
def test_simple(self):
|
||||||
eq_(
|
eq_(
|
||||||
67,
|
67,
|
||||||
compare_fields(
|
compare_fields([["a", "b"], ["c", "d", "e"]], [["a", "b"], ["c", "d", "f"]]),
|
||||||
[["a", "b"], ["c", "d", "e"]], [["a", "b"], ["c", "d", "f"]]
|
|
||||||
),
|
|
||||||
)
|
)
|
||||||
|
|
||||||
def test_empty(self):
|
def test_empty(self):
|
||||||
@@ -216,24 +217,24 @@ class TestCaseWordCompareWithFields:
|
|||||||
eq_([["c", "d", "f"], ["a", "b"]], second)
|
eq_([["c", "d", "f"], ["a", "b"]], second)
|
||||||
|
|
||||||
|
|
||||||
class TestCasebuild_word_dict:
|
class TestCaseBuildWordDict:
|
||||||
def test_with_standard_words(self):
|
def test_with_standard_words(self):
|
||||||
itemList = [NamedObject("foo bar", True)]
|
item_list = [NamedObject("foo bar", True)]
|
||||||
itemList.append(NamedObject("bar baz", True))
|
item_list.append(NamedObject("bar baz", True))
|
||||||
itemList.append(NamedObject("baz bleh foo", True))
|
item_list.append(NamedObject("baz bleh foo", True))
|
||||||
d = build_word_dict(itemList)
|
d = build_word_dict(item_list)
|
||||||
eq_(4, len(d))
|
eq_(4, len(d))
|
||||||
eq_(2, len(d["foo"]))
|
eq_(2, len(d["foo"]))
|
||||||
assert itemList[0] in d["foo"]
|
assert item_list[0] in d["foo"]
|
||||||
assert itemList[2] in d["foo"]
|
assert item_list[2] in d["foo"]
|
||||||
eq_(2, len(d["bar"]))
|
eq_(2, len(d["bar"]))
|
||||||
assert itemList[0] in d["bar"]
|
assert item_list[0] in d["bar"]
|
||||||
assert itemList[1] in d["bar"]
|
assert item_list[1] in d["bar"]
|
||||||
eq_(2, len(d["baz"]))
|
eq_(2, len(d["baz"]))
|
||||||
assert itemList[1] in d["baz"]
|
assert item_list[1] in d["baz"]
|
||||||
assert itemList[2] in d["baz"]
|
assert item_list[2] in d["baz"]
|
||||||
eq_(1, len(d["bleh"]))
|
eq_(1, len(d["bleh"]))
|
||||||
assert itemList[2] in d["bleh"]
|
assert item_list[2] in d["bleh"]
|
||||||
|
|
||||||
def test_unpack_fields(self):
|
def test_unpack_fields(self):
|
||||||
o = NamedObject("")
|
o = NamedObject("")
|
||||||
@@ -261,15 +262,13 @@ class TestCasebuild_word_dict:
|
|||||||
j = job.Job(1, do_progress)
|
j = job.Job(1, do_progress)
|
||||||
self.log = []
|
self.log = []
|
||||||
s = "foo bar"
|
s = "foo bar"
|
||||||
build_word_dict(
|
build_word_dict([NamedObject(s, True), NamedObject(s, True), NamedObject(s, True)], j)
|
||||||
[NamedObject(s, True), NamedObject(s, True), NamedObject(s, True)], j
|
|
||||||
)
|
|
||||||
# We don't have intermediate log because iter_with_progress is called with every > 1
|
# We don't have intermediate log because iter_with_progress is called with every > 1
|
||||||
eq_(0, self.log[0])
|
eq_(0, self.log[0])
|
||||||
eq_(100, self.log[1])
|
eq_(100, self.log[1])
|
||||||
|
|
||||||
|
|
||||||
class TestCasemerge_similar_words:
|
class TestCaseMergeSimilarWords:
|
||||||
def test_some_similar_words(self):
|
def test_some_similar_words(self):
|
||||||
d = {
|
d = {
|
||||||
"foobar": set([1]),
|
"foobar": set([1]),
|
||||||
@@ -281,11 +280,11 @@ class TestCasemerge_similar_words:
|
|||||||
eq_(3, len(d["foobar"]))
|
eq_(3, len(d["foobar"]))
|
||||||
|
|
||||||
|
|
||||||
class TestCasereduce_common_words:
|
class TestCaseReduceCommonWords:
|
||||||
def test_typical(self):
|
def test_typical(self):
|
||||||
d = {
|
d = {
|
||||||
"foo": set([NamedObject("foo bar", True) for i in range(50)]),
|
"foo": set([NamedObject("foo bar", True) for _ in range(50)]),
|
||||||
"bar": set([NamedObject("foo bar", True) for i in range(49)]),
|
"bar": set([NamedObject("foo bar", True) for _ in range(49)]),
|
||||||
}
|
}
|
||||||
reduce_common_words(d, 50)
|
reduce_common_words(d, 50)
|
||||||
assert "foo" not in d
|
assert "foo" not in d
|
||||||
@@ -293,10 +292,7 @@ class TestCasereduce_common_words:
|
|||||||
|
|
||||||
def test_dont_remove_objects_with_only_common_words(self):
|
def test_dont_remove_objects_with_only_common_words(self):
|
||||||
d = {
|
d = {
|
||||||
"common": set(
|
"common": set([NamedObject("common uncommon", True) for _ in range(50)] + [NamedObject("common", True)]),
|
||||||
[NamedObject("common uncommon", True) for i in range(50)]
|
|
||||||
+ [NamedObject("common", True)]
|
|
||||||
),
|
|
||||||
"uncommon": set([NamedObject("common uncommon", True)]),
|
"uncommon": set([NamedObject("common uncommon", True)]),
|
||||||
}
|
}
|
||||||
reduce_common_words(d, 50)
|
reduce_common_words(d, 50)
|
||||||
@@ -305,23 +301,20 @@ class TestCasereduce_common_words:
|
|||||||
|
|
||||||
def test_values_still_are_set_instances(self):
|
def test_values_still_are_set_instances(self):
|
||||||
d = {
|
d = {
|
||||||
"common": set(
|
"common": set([NamedObject("common uncommon", True) for _ in range(50)] + [NamedObject("common", True)]),
|
||||||
[NamedObject("common uncommon", True) for i in range(50)]
|
|
||||||
+ [NamedObject("common", True)]
|
|
||||||
),
|
|
||||||
"uncommon": set([NamedObject("common uncommon", True)]),
|
"uncommon": set([NamedObject("common uncommon", True)]),
|
||||||
}
|
}
|
||||||
reduce_common_words(d, 50)
|
reduce_common_words(d, 50)
|
||||||
assert isinstance(d["common"], set)
|
assert isinstance(d["common"], set)
|
||||||
assert isinstance(d["uncommon"], set)
|
assert isinstance(d["uncommon"], set)
|
||||||
|
|
||||||
def test_dont_raise_KeyError_when_a_word_has_been_removed(self):
|
def test_dont_raise_keyerror_when_a_word_has_been_removed(self):
|
||||||
# If a word has been removed by the reduce, an object in a subsequent common word that
|
# If a word has been removed by the reduce, an object in a subsequent common word that
|
||||||
# contains the word that has been removed would cause a KeyError.
|
# contains the word that has been removed would cause a KeyError.
|
||||||
d = {
|
d = {
|
||||||
"foo": set([NamedObject("foo bar baz", True) for i in range(50)]),
|
"foo": set([NamedObject("foo bar baz", True) for _ in range(50)]),
|
||||||
"bar": set([NamedObject("foo bar baz", True) for i in range(50)]),
|
"bar": set([NamedObject("foo bar baz", True) for _ in range(50)]),
|
||||||
"baz": set([NamedObject("foo bar baz", True) for i in range(49)]),
|
"baz": set([NamedObject("foo bar baz", True) for _ in range(49)]),
|
||||||
}
|
}
|
||||||
try:
|
try:
|
||||||
reduce_common_words(d, 50)
|
reduce_common_words(d, 50)
|
||||||
@@ -335,7 +328,7 @@ class TestCasereduce_common_words:
|
|||||||
o.words = [["foo", "bar"], ["baz"]]
|
o.words = [["foo", "bar"], ["baz"]]
|
||||||
return o
|
return o
|
||||||
|
|
||||||
d = {"foo": set([create_it() for i in range(50)])}
|
d = {"foo": set([create_it() for _ in range(50)])}
|
||||||
try:
|
try:
|
||||||
reduce_common_words(d, 50)
|
reduce_common_words(d, 50)
|
||||||
except TypeError:
|
except TypeError:
|
||||||
@@ -348,13 +341,9 @@ class TestCasereduce_common_words:
|
|||||||
# would not stay in 'bar' because 'foo' is not a common word anymore.
|
# would not stay in 'bar' because 'foo' is not a common word anymore.
|
||||||
only_common = NamedObject("foo bar", True)
|
only_common = NamedObject("foo bar", True)
|
||||||
d = {
|
d = {
|
||||||
"foo": set(
|
"foo": set([NamedObject("foo bar baz", True) for _ in range(49)] + [only_common]),
|
||||||
[NamedObject("foo bar baz", True) for i in range(49)] + [only_common]
|
"bar": set([NamedObject("foo bar baz", True) for _ in range(49)] + [only_common]),
|
||||||
),
|
"baz": set([NamedObject("foo bar baz", True) for _ in range(49)]),
|
||||||
"bar": set(
|
|
||||||
[NamedObject("foo bar baz", True) for i in range(49)] + [only_common]
|
|
||||||
),
|
|
||||||
"baz": set([NamedObject("foo bar baz", True) for i in range(49)]),
|
|
||||||
}
|
}
|
||||||
reduce_common_words(d, 50)
|
reduce_common_words(d, 50)
|
||||||
eq_(1, len(d["foo"]))
|
eq_(1, len(d["foo"]))
|
||||||
@@ -362,7 +351,7 @@ class TestCasereduce_common_words:
|
|||||||
eq_(49, len(d["baz"]))
|
eq_(49, len(d["baz"]))
|
||||||
|
|
||||||
|
|
||||||
class TestCaseget_match:
|
class TestCaseGetMatch:
|
||||||
def test_simple(self):
|
def test_simple(self):
|
||||||
o1 = NamedObject("foo bar", True)
|
o1 = NamedObject("foo bar", True)
|
||||||
o2 = NamedObject("bar bleh", True)
|
o2 = NamedObject("bar bleh", True)
|
||||||
@@ -382,9 +371,7 @@ class TestCaseget_match:
|
|||||||
assert object() not in m
|
assert object() not in m
|
||||||
|
|
||||||
def test_word_weight(self):
|
def test_word_weight(self):
|
||||||
m = get_match(
|
m = get_match(NamedObject("foo bar", True), NamedObject("bar bleh", True), (WEIGHT_WORDS,))
|
||||||
NamedObject("foo bar", True), NamedObject("bar bleh", True), (WEIGHT_WORDS,)
|
|
||||||
)
|
|
||||||
eq_(m.percentage, int((6.0 / 13.0) * 100))
|
eq_(m.percentage, int((6.0 / 13.0) * 100))
|
||||||
|
|
||||||
|
|
||||||
@@ -393,12 +380,12 @@ class TestCaseGetMatches:
|
|||||||
eq_(getmatches([]), [])
|
eq_(getmatches([]), [])
|
||||||
|
|
||||||
def test_simple(self):
|
def test_simple(self):
|
||||||
itemList = [
|
item_list = [
|
||||||
NamedObject("foo bar"),
|
NamedObject("foo bar"),
|
||||||
NamedObject("bar bleh"),
|
NamedObject("bar bleh"),
|
||||||
NamedObject("a b c foo"),
|
NamedObject("a b c foo"),
|
||||||
]
|
]
|
||||||
r = getmatches(itemList)
|
r = getmatches(item_list)
|
||||||
eq_(2, len(r))
|
eq_(2, len(r))
|
||||||
m = first(m for m in r if m.percentage == 50) # "foo bar" and "bar bleh"
|
m = first(m for m in r if m.percentage == 50) # "foo bar" and "bar bleh"
|
||||||
assert_match(m, "foo bar", "bar bleh")
|
assert_match(m, "foo bar", "bar bleh")
|
||||||
@@ -406,40 +393,40 @@ class TestCaseGetMatches:
|
|||||||
assert_match(m, "foo bar", "a b c foo")
|
assert_match(m, "foo bar", "a b c foo")
|
||||||
|
|
||||||
def test_null_and_unrelated_objects(self):
|
def test_null_and_unrelated_objects(self):
|
||||||
itemList = [
|
item_list = [
|
||||||
NamedObject("foo bar"),
|
NamedObject("foo bar"),
|
||||||
NamedObject("bar bleh"),
|
NamedObject("bar bleh"),
|
||||||
NamedObject(""),
|
NamedObject(""),
|
||||||
NamedObject("unrelated object"),
|
NamedObject("unrelated object"),
|
||||||
]
|
]
|
||||||
r = getmatches(itemList)
|
r = getmatches(item_list)
|
||||||
eq_(len(r), 1)
|
eq_(len(r), 1)
|
||||||
m = r[0]
|
m = r[0]
|
||||||
eq_(m.percentage, 50)
|
eq_(m.percentage, 50)
|
||||||
assert_match(m, "foo bar", "bar bleh")
|
assert_match(m, "foo bar", "bar bleh")
|
||||||
|
|
||||||
def test_twice_the_same_word(self):
|
def test_twice_the_same_word(self):
|
||||||
itemList = [NamedObject("foo foo bar"), NamedObject("bar bleh")]
|
item_list = [NamedObject("foo foo bar"), NamedObject("bar bleh")]
|
||||||
r = getmatches(itemList)
|
r = getmatches(item_list)
|
||||||
eq_(1, len(r))
|
eq_(1, len(r))
|
||||||
|
|
||||||
def test_twice_the_same_word_when_preworded(self):
|
def test_twice_the_same_word_when_preworded(self):
|
||||||
itemList = [NamedObject("foo foo bar", True), NamedObject("bar bleh", True)]
|
item_list = [NamedObject("foo foo bar", True), NamedObject("bar bleh", True)]
|
||||||
r = getmatches(itemList)
|
r = getmatches(item_list)
|
||||||
eq_(1, len(r))
|
eq_(1, len(r))
|
||||||
|
|
||||||
def test_two_words_match(self):
|
def test_two_words_match(self):
|
||||||
itemList = [NamedObject("foo bar"), NamedObject("foo bar bleh")]
|
item_list = [NamedObject("foo bar"), NamedObject("foo bar bleh")]
|
||||||
r = getmatches(itemList)
|
r = getmatches(item_list)
|
||||||
eq_(1, len(r))
|
eq_(1, len(r))
|
||||||
|
|
||||||
def test_match_files_with_only_common_words(self):
|
def test_match_files_with_only_common_words(self):
|
||||||
# If a word occurs more than 50 times, it is excluded from the matching process
|
# If a word occurs more than 50 times, it is excluded from the matching process
|
||||||
# The problem with the common_word_threshold is that the files containing only common
|
# The problem with the common_word_threshold is that the files containing only common
|
||||||
# words will never be matched together. We *should* match them.
|
# words will never be matched together. We *should* match them.
|
||||||
# This test assumes that the common word threashold const is 50
|
# This test assumes that the common word threshold const is 50
|
||||||
itemList = [NamedObject("foo") for i in range(50)]
|
item_list = [NamedObject("foo") for _ in range(50)]
|
||||||
r = getmatches(itemList)
|
r = getmatches(item_list)
|
||||||
eq_(1225, len(r))
|
eq_(1225, len(r))
|
||||||
|
|
||||||
def test_use_words_already_there_if_there(self):
|
def test_use_words_already_there_if_there(self):
|
||||||
@@ -462,28 +449,28 @@ class TestCaseGetMatches:
|
|||||||
eq_(100, self.log[-1])
|
eq_(100, self.log[-1])
|
||||||
|
|
||||||
def test_weight_words(self):
|
def test_weight_words(self):
|
||||||
itemList = [NamedObject("foo bar"), NamedObject("bar bleh")]
|
item_list = [NamedObject("foo bar"), NamedObject("bar bleh")]
|
||||||
m = getmatches(itemList, weight_words=True)[0]
|
m = getmatches(item_list, weight_words=True)[0]
|
||||||
eq_(int((6.0 / 13.0) * 100), m.percentage)
|
eq_(int((6.0 / 13.0) * 100), m.percentage)
|
||||||
|
|
||||||
def test_similar_word(self):
|
def test_similar_word(self):
|
||||||
itemList = [NamedObject("foobar"), NamedObject("foobars")]
|
item_list = [NamedObject("foobar"), NamedObject("foobars")]
|
||||||
eq_(len(getmatches(itemList, match_similar_words=True)), 1)
|
eq_(len(getmatches(item_list, match_similar_words=True)), 1)
|
||||||
eq_(getmatches(itemList, match_similar_words=True)[0].percentage, 100)
|
eq_(getmatches(item_list, match_similar_words=True)[0].percentage, 100)
|
||||||
itemList = [NamedObject("foobar"), NamedObject("foo")]
|
item_list = [NamedObject("foobar"), NamedObject("foo")]
|
||||||
eq_(len(getmatches(itemList, match_similar_words=True)), 0) # too far
|
eq_(len(getmatches(item_list, match_similar_words=True)), 0) # too far
|
||||||
itemList = [NamedObject("bizkit"), NamedObject("bizket")]
|
item_list = [NamedObject("bizkit"), NamedObject("bizket")]
|
||||||
eq_(len(getmatches(itemList, match_similar_words=True)), 1)
|
eq_(len(getmatches(item_list, match_similar_words=True)), 1)
|
||||||
itemList = [NamedObject("foobar"), NamedObject("foosbar")]
|
item_list = [NamedObject("foobar"), NamedObject("foosbar")]
|
||||||
eq_(len(getmatches(itemList, match_similar_words=True)), 1)
|
eq_(len(getmatches(item_list, match_similar_words=True)), 1)
|
||||||
|
|
||||||
def test_single_object_with_similar_words(self):
|
def test_single_object_with_similar_words(self):
|
||||||
itemList = [NamedObject("foo foos")]
|
item_list = [NamedObject("foo foos")]
|
||||||
eq_(len(getmatches(itemList, match_similar_words=True)), 0)
|
eq_(len(getmatches(item_list, match_similar_words=True)), 0)
|
||||||
|
|
||||||
def test_double_words_get_counted_only_once(self):
|
def test_double_words_get_counted_only_once(self):
|
||||||
itemList = [NamedObject("foo bar foo bleh"), NamedObject("foo bar bleh bar")]
|
item_list = [NamedObject("foo bar foo bleh"), NamedObject("foo bar bleh bar")]
|
||||||
m = getmatches(itemList)[0]
|
m = getmatches(item_list)[0]
|
||||||
eq_(75, m.percentage)
|
eq_(75, m.percentage)
|
||||||
|
|
||||||
def test_with_fields(self):
|
def test_with_fields(self):
|
||||||
@@ -503,13 +490,13 @@ class TestCaseGetMatches:
|
|||||||
eq_(m.percentage, 50)
|
eq_(m.percentage, 50)
|
||||||
|
|
||||||
def test_only_match_similar_when_the_option_is_set(self):
|
def test_only_match_similar_when_the_option_is_set(self):
|
||||||
itemList = [NamedObject("foobar"), NamedObject("foobars")]
|
item_list = [NamedObject("foobar"), NamedObject("foobars")]
|
||||||
eq_(len(getmatches(itemList, match_similar_words=False)), 0)
|
eq_(len(getmatches(item_list, match_similar_words=False)), 0)
|
||||||
|
|
||||||
def test_dont_recurse_do_match(self):
|
def test_dont_recurse_do_match(self):
|
||||||
# with nosetests, the stack is increased. The number has to be high enough not to be failing falsely
|
# with nosetests, the stack is increased. The number has to be high enough not to be failing falsely
|
||||||
sys.setrecursionlimit(200)
|
sys.setrecursionlimit(200)
|
||||||
files = [NamedObject("foo bar") for i in range(201)]
|
files = [NamedObject("foo bar") for _ in range(201)]
|
||||||
try:
|
try:
|
||||||
getmatches(files)
|
getmatches(files)
|
||||||
except RuntimeError:
|
except RuntimeError:
|
||||||
@@ -518,27 +505,27 @@ class TestCaseGetMatches:
|
|||||||
sys.setrecursionlimit(1000)
|
sys.setrecursionlimit(1000)
|
||||||
|
|
||||||
def test_min_match_percentage(self):
|
def test_min_match_percentage(self):
|
||||||
itemList = [
|
item_list = [
|
||||||
NamedObject("foo bar"),
|
NamedObject("foo bar"),
|
||||||
NamedObject("bar bleh"),
|
NamedObject("bar bleh"),
|
||||||
NamedObject("a b c foo"),
|
NamedObject("a b c foo"),
|
||||||
]
|
]
|
||||||
r = getmatches(itemList, min_match_percentage=50)
|
r = getmatches(item_list, min_match_percentage=50)
|
||||||
eq_(1, len(r)) # Only "foo bar" / "bar bleh" should match
|
eq_(1, len(r)) # Only "foo bar" / "bar bleh" should match
|
||||||
|
|
||||||
def test_MemoryError(self, monkeypatch):
|
def test_memory_error(self, monkeypatch):
|
||||||
@log_calls
|
@log_calls
|
||||||
def mocked_match(first, second, flags):
|
def mocked_match(first, second, flags):
|
||||||
if len(mocked_match.calls) > 42:
|
if len(mocked_match.calls) > 42:
|
||||||
raise MemoryError()
|
raise MemoryError()
|
||||||
return Match(first, second, 0)
|
return Match(first, second, 0)
|
||||||
|
|
||||||
objects = [NamedObject() for i in range(10)] # results in 45 matches
|
objects = [NamedObject() for _ in range(10)] # results in 45 matches
|
||||||
monkeypatch.setattr(engine, "get_match", mocked_match)
|
monkeypatch.setattr(engine, "get_match", mocked_match)
|
||||||
try:
|
try:
|
||||||
r = getmatches(objects)
|
r = getmatches(objects)
|
||||||
except MemoryError:
|
except MemoryError:
|
||||||
self.fail("MemorryError must be handled")
|
self.fail("MemoryError must be handled")
|
||||||
eq_(42, len(r))
|
eq_(42, len(r))
|
||||||
|
|
||||||
|
|
||||||
@@ -547,9 +534,35 @@ class TestCaseGetMatchesByContents:
|
|||||||
o1, o2 = no(size=0), no(size=0)
|
o1, o2 = no(size=0), no(size=0)
|
||||||
assert not getmatches_by_contents([o1, o2])
|
assert not getmatches_by_contents([o1, o2])
|
||||||
|
|
||||||
|
def test_big_file_partial_hashes(self):
|
||||||
|
smallsize = 1
|
||||||
|
bigsize = 100 * 1024 * 1024 # 100MB
|
||||||
|
f = [
|
||||||
|
no("bigfoo", size=bigsize),
|
||||||
|
no("bigbar", size=bigsize),
|
||||||
|
no("smallfoo", size=smallsize),
|
||||||
|
no("smallbar", size=smallsize),
|
||||||
|
]
|
||||||
|
f[0].md5 = f[0].md5partial = f[0].md5samples = "foobar"
|
||||||
|
f[1].md5 = f[1].md5partial = f[1].md5samples = "foobar"
|
||||||
|
f[2].md5 = f[2].md5partial = "bleh"
|
||||||
|
f[3].md5 = f[3].md5partial = "bleh"
|
||||||
|
r = getmatches_by_contents(f, bigsize=bigsize)
|
||||||
|
eq_(len(r), 2)
|
||||||
|
# User disabled optimization for big files, compute hashes as usual
|
||||||
|
r = getmatches_by_contents(f, bigsize=0)
|
||||||
|
eq_(len(r), 2)
|
||||||
|
# Other file is now slightly different, md5partial is still the same
|
||||||
|
f[1].md5 = f[1].md5samples = "foobardiff"
|
||||||
|
r = getmatches_by_contents(f, bigsize=bigsize)
|
||||||
|
# Successfully filter it out
|
||||||
|
eq_(len(r), 1)
|
||||||
|
r = getmatches_by_contents(f, bigsize=0)
|
||||||
|
eq_(len(r), 1)
|
||||||
|
|
||||||
|
|
||||||
class TestCaseGroup:
|
class TestCaseGroup:
|
||||||
def test_empy(self):
|
def test_empty(self):
|
||||||
g = Group()
|
g = Group()
|
||||||
eq_(None, g.ref)
|
eq_(None, g.ref)
|
||||||
eq_([], g.dupes)
|
eq_([], g.dupes)
|
||||||
@@ -723,8 +736,7 @@ class TestCaseGroup:
|
|||||||
# if the ref has the same key as one or more of the dupe, run the tie_breaker func among them
|
# if the ref has the same key as one or more of the dupe, run the tie_breaker func among them
|
||||||
g = get_test_group()
|
g = get_test_group()
|
||||||
o1, o2, o3 = g.ordered
|
o1, o2, o3 = g.ordered
|
||||||
tie_breaker = lambda ref, dupe: dupe is o3
|
g.prioritize(lambda x: 0, lambda ref, dupe: dupe is o3)
|
||||||
g.prioritize(lambda x: 0, tie_breaker)
|
|
||||||
assert g.ref is o3
|
assert g.ref is o3
|
||||||
|
|
||||||
def test_prioritize_with_tie_breaker_runs_on_all_dupes(self):
|
def test_prioritize_with_tie_breaker_runs_on_all_dupes(self):
|
||||||
@@ -735,8 +747,7 @@ class TestCaseGroup:
|
|||||||
o1.foo = 1
|
o1.foo = 1
|
||||||
o2.foo = 2
|
o2.foo = 2
|
||||||
o3.foo = 3
|
o3.foo = 3
|
||||||
tie_breaker = lambda ref, dupe: dupe.foo > ref.foo
|
g.prioritize(lambda x: 0, lambda ref, dupe: dupe.foo > ref.foo)
|
||||||
g.prioritize(lambda x: 0, tie_breaker)
|
|
||||||
assert g.ref is o3
|
assert g.ref is o3
|
||||||
|
|
||||||
def test_prioritize_with_tie_breaker_runs_only_on_tie_dupes(self):
|
def test_prioritize_with_tie_breaker_runs_only_on_tie_dupes(self):
|
||||||
@@ -749,9 +760,7 @@ class TestCaseGroup:
|
|||||||
o1.bar = 1
|
o1.bar = 1
|
||||||
o2.bar = 2
|
o2.bar = 2
|
||||||
o3.bar = 3
|
o3.bar = 3
|
||||||
key_func = lambda x: -x.foo
|
g.prioritize(lambda x: -x.foo, lambda ref, dupe: dupe.bar > ref.bar)
|
||||||
tie_breaker = lambda ref, dupe: dupe.bar > ref.bar
|
|
||||||
g.prioritize(key_func, tie_breaker)
|
|
||||||
assert g.ref is o2
|
assert g.ref is o2
|
||||||
|
|
||||||
def test_prioritize_with_ref_dupe(self):
|
def test_prioritize_with_ref_dupe(self):
|
||||||
@@ -792,14 +801,14 @@ class TestCaseGroup:
         eq_(0, len(g.candidates))
 
 
-class TestCaseget_groups:
+class TestCaseGetGroups:
     def test_empty(self):
         r = get_groups([])
         eq_([], r)
 
     def test_simple(self):
-        itemList = [NamedObject("foo bar"), NamedObject("bar bleh")]
-        matches = getmatches(itemList)
+        item_list = [NamedObject("foo bar"), NamedObject("bar bleh")]
+        matches = getmatches(item_list)
         m = matches[0]
         r = get_groups(matches)
         eq_(1, len(r))
@@ -809,15 +818,15 @@ class TestCaseget_groups:
 
     def test_group_with_multiple_matches(self):
         # This results in 3 matches
-        itemList = [NamedObject("foo"), NamedObject("foo"), NamedObject("foo")]
-        matches = getmatches(itemList)
+        item_list = [NamedObject("foo"), NamedObject("foo"), NamedObject("foo")]
+        matches = getmatches(item_list)
         r = get_groups(matches)
         eq_(1, len(r))
         g = r[0]
         eq_(3, len(g))
 
     def test_must_choose_a_group(self):
-        itemList = [
+        item_list = [
             NamedObject("a b"),
             NamedObject("a b"),
             NamedObject("b c"),
@@ -826,13 +835,13 @@ class TestCaseget_groups:
         ]
         # There will be 2 groups here: group "a b" and group "c d"
         # "b c" can go either of them, but not both.
-        matches = getmatches(itemList)
+        matches = getmatches(item_list)
         r = get_groups(matches)
         eq_(2, len(r))
         eq_(5, len(r[0]) + len(r[1]))
 
     def test_should_all_go_in_the_same_group(self):
-        itemList = [
+        item_list = [
             NamedObject("a b"),
             NamedObject("a b"),
             NamedObject("a b"),
@@ -840,7 +849,7 @@ class TestCaseget_groups:
         ]
         # There will be 2 groups here: group "a b" and group "c d"
         # "b c" can fit in both, but it must be in only one of them
-        matches = getmatches(itemList)
+        matches = getmatches(item_list)
         r = get_groups(matches)
         eq_(1, len(r))
 
@@ -859,8 +868,8 @@ class TestCaseget_groups:
         assert o3 in g
 
     def test_four_sized_group(self):
-        itemList = [NamedObject("foobar") for i in range(4)]
-        m = getmatches(itemList)
+        item_list = [NamedObject("foobar") for _ in range(4)]
+        m = getmatches(item_list)
         r = get_groups(m)
         eq_(1, len(r))
         eq_(4, len(r[0]))
@@ -883,9 +892,7 @@ class TestCaseget_groups:
         m1 = Match(A, B, 90)  # This is the strongest "A" match
         m2 = Match(A, C, 80)  # Because C doesn't match with B, it won't be in the group
         m3 = Match(A, D, 80)  # Same thing for D
-        m4 = Match(
-            C, D, 70
-        )  # However, because C and D match, they should have their own group.
+        m4 = Match(C, D, 70)  # However, because C and D match, they should have their own group.
         groups = get_groups([m1, m2, m3, m4])
         eq_(len(groups), 2)
         g1, g2 = groups

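The TestCaseGetGroups cases encode one invariant worth spelling out: every file ends up in exactly one group, and a candidate only joins a group if it matches everything already in it, which is why C and D fall out of A's group and form their own. A toy version of that grouping pass (behavior inferred from the tests, not the engine's code):

def get_groups_sketch(matches):
    matched = {frozenset((m.first, m.second)) for m in matches}
    groups, owner = [], {}
    # Strongest matches first; weaker bridging matches get discarded.
    for m in sorted(matches, key=lambda m: -m.percentage):
        a, b = m.first, m.second
        if a in owner and b in owner:
            continue  # both already claimed by some group
        if a not in owner and b not in owner:
            group = [a, b]
            groups.append(group)
            owner[a] = owner[b] = group
            continue
        new, group = (a, owner[b]) if b in owner else (b, owner[a])
        # join only if the newcomer matches every current member
        if all(frozenset((new, member)) in matched for member in group):
            group.append(new)
            owner[new] = group
    return groups
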
@@ -5,11 +5,8 @@
 # http://www.gnu.org/licenses/gpl-3.0.html
 
 import io
-# import os.path as op
-
 from xml.etree import ElementTree as ET
 
-# from pytest import raises
 from hscommon.testutil import eq_
 from hscommon.plat import ISWINDOWS
 
@@ -104,7 +101,7 @@ class TestCaseListEmpty:
         regex1 = r"one"
         regex2 = r"two"
         self.exclude_list.add(regex1)
-        assert(regex1 in self.exclude_list)
+        assert regex1 in self.exclude_list
         self.exclude_list.add(regex2)
         self.exclude_list.mark(regex1)
         self.exclude_list.mark(regex2)
@@ -113,17 +110,17 @@ class TestCaseListEmpty:
         compiled_files = [x for x in self.exclude_list.compiled_files]
         eq_(len(compiled_files), 2)
         self.exclude_list.remove(regex2)
-        assert(regex2 not in self.exclude_list)
+        assert regex2 not in self.exclude_list
         eq_(len(self.exclude_list), 1)
 
     def test_add_duplicate(self):
         self.exclude_list.add(r"one")
-        eq_(1 , len(self.exclude_list))
+        eq_(1, len(self.exclude_list))
         try:
             self.exclude_list.add(r"one")
         except Exception:
             pass
-        eq_(1 , len(self.exclude_list))
+        eq_(1, len(self.exclude_list))
 
     def test_add_not_compilable(self):
         # Trying to add a non-valid regex should not work and raise exception
@@ -143,11 +140,7 @@ class TestCaseListEmpty:
     def test_force_add_not_compilable(self):
         """Used when loading from XML for example"""
         regex = r"one))"
-        try:
-            self.exclude_list.add(regex, forced=True)
-        except Exception as e:
-            # Should not get an exception here unless it's a duplicate regex
-            raise e
+        self.exclude_list.add(regex, forced=True)
         marked = self.exclude_list.mark(regex)
         eq_(marked, False)  # can't be marked since not compilable
         eq_(len(self.exclude_list), 1)
@@ -188,6 +181,28 @@ class TestCaseListEmpty:
         self.exclude_list.rename(regex_renamed_compilable, regex_compilable)
         eq_(self.exclude_list.is_marked(regex_compilable), True)
 
+    def test_rename_regex_file_to_path(self):
+        regex = r".*/one.*"
+        if ISWINDOWS:
+            regex = r".*\\one.*"
+        regex2 = r".*one.*"
+        self.exclude_list.add(regex)
+        self.exclude_list.mark(regex)
+        compiled_re = [x.pattern for x in self.exclude_list._excluded_compiled]
+        files_re = [x.pattern for x in self.exclude_list.compiled_files]
+        paths_re = [x.pattern for x in self.exclude_list.compiled_paths]
+        assert regex in compiled_re
+        assert regex not in files_re
+        assert regex in paths_re
+        self.exclude_list.rename(regex, regex2)
+        compiled_re = [x.pattern for x in self.exclude_list._excluded_compiled]
+        files_re = [x.pattern for x in self.exclude_list.compiled_files]
+        paths_re = [x.pattern for x in self.exclude_list.compiled_paths]
+        assert regex not in compiled_re
+        assert regex2 in compiled_re
+        assert regex2 in files_re
+        assert regex2 not in paths_re
+
     def test_restore_default(self):
         """Only unmark previously added regexes and mark the pre-defined ones"""
         regex = r"one"
@@ -208,26 +223,163 @@ class TestCaseListEmpty:
                 if compiled_re.pattern == re:
                     found = True
             if not found:
-                raise(Exception(f"Default RE {re} not found in compiled list."))
-            continue
+                raise (Exception(f"Default RE {re} not found in compiled list."))
         eq_(len(default_regexes), len(self.exclude_list.compiled))
 
 
+class TestCaseListEmptyUnion(TestCaseListEmpty):
+    """Same but with union regex"""
+
+    def setup_method(self, method):
+        self.app = DupeGuru()
+        self.app.exclude_list = ExcludeList(union_regex=True)
+        self.exclude_list = self.app.exclude_list
+
+    def test_add_mark_and_remove_regex(self):
+        regex1 = r"one"
+        regex2 = r"two"
+        self.exclude_list.add(regex1)
+        assert regex1 in self.exclude_list
+        self.exclude_list.add(regex2)
+        self.exclude_list.mark(regex1)
+        self.exclude_list.mark(regex2)
+        eq_(len(self.exclude_list), 2)
+        eq_(len(self.exclude_list.compiled), 1)
+        compiled_files = [x for x in self.exclude_list.compiled_files]
+        eq_(len(compiled_files), 1)  # Two patterns joined together into one
+        assert "|" in compiled_files[0].pattern
+        self.exclude_list.remove(regex2)
+        assert regex2 not in self.exclude_list
+        eq_(len(self.exclude_list), 1)
+
+    def test_rename_regex_file_to_path(self):
+        regex = r".*/one.*"
+        if ISWINDOWS:
+            regex = r".*\\one.*"
+        regex2 = r".*one.*"
+        self.exclude_list.add(regex)
+        self.exclude_list.mark(regex)
+        eq_(len([x for x in self.exclude_list]), 1)
+        compiled_re = [x.pattern for x in self.exclude_list.compiled]
+        files_re = [x.pattern for x in self.exclude_list.compiled_files]
+        paths_re = [x.pattern for x in self.exclude_list.compiled_paths]
+        assert regex in compiled_re
+        assert regex not in files_re
+        assert regex in paths_re
+        self.exclude_list.rename(regex, regex2)
+        eq_(len([x for x in self.exclude_list]), 1)
+        compiled_re = [x.pattern for x in self.exclude_list.compiled]
+        files_re = [x.pattern for x in self.exclude_list.compiled_files]
+        paths_re = [x.pattern for x in self.exclude_list.compiled_paths]
+        assert regex not in compiled_re
+        assert regex2 in compiled_re
+        assert regex2 in files_re
+        assert regex2 not in paths_re
+
+    def test_restore_default(self):
+        """Only unmark previously added regexes and mark the pre-defined ones"""
+        regex = r"one"
+        self.exclude_list.add(regex)
+        self.exclude_list.mark(regex)
+        self.exclude_list.restore_defaults()
+        eq_(len(default_regexes), self.exclude_list.marked_count)
+        # added regex shouldn't be marked
+        eq_(self.exclude_list.is_marked(regex), False)
+        # added regex shouldn't be in compiled list either
+        compiled = [x for x in self.exclude_list.compiled]
+        assert regex not in compiled
+        # Need to escape both to get the same strings after compilation
+        compiled_escaped = set([x.encode("unicode-escape").decode() for x in compiled[0].pattern.split("|")])
+        default_escaped = set([x.encode("unicode-escape").decode() for x in default_regexes])
+        assert compiled_escaped == default_escaped
+        eq_(len(default_regexes), len(compiled[0].pattern.split("|")))
+
+
 class TestCaseDictEmpty(TestCaseListEmpty):
     """Same, but with dictionary implementation"""
 
     def setup_method(self, method):
         self.app = DupeGuru()
         self.app.exclude_list = ExcludeDict(union_regex=False)
         self.exclude_list = self.app.exclude_list
 
 
+class TestCaseDictEmptyUnion(TestCaseDictEmpty):
+    """Same, but with union regex"""
+
+    def setup_method(self, method):
+        self.app = DupeGuru()
+        self.app.exclude_list = ExcludeDict(union_regex=True)
+        self.exclude_list = self.app.exclude_list
+
+    def test_add_mark_and_remove_regex(self):
+        regex1 = r"one"
+        regex2 = r"two"
+        self.exclude_list.add(regex1)
+        assert regex1 in self.exclude_list
+        self.exclude_list.add(regex2)
+        self.exclude_list.mark(regex1)
+        self.exclude_list.mark(regex2)
+        eq_(len(self.exclude_list), 2)
+        eq_(len(self.exclude_list.compiled), 1)
+        compiled_files = [x for x in self.exclude_list.compiled_files]
+        # two patterns joined into one
+        eq_(len(compiled_files), 1)
+        self.exclude_list.remove(regex2)
+        assert regex2 not in self.exclude_list
+        eq_(len(self.exclude_list), 1)
+
+    def test_rename_regex_file_to_path(self):
+        regex = r".*/one.*"
+        if ISWINDOWS:
+            regex = r".*\\one.*"
+        regex2 = r".*one.*"
+        self.exclude_list.add(regex)
+        self.exclude_list.mark(regex)
+        marked_re = [x for marked, x in self.exclude_list if marked]
+        eq_(len(marked_re), 1)
+        compiled_re = [x.pattern for x in self.exclude_list.compiled]
+        files_re = [x.pattern for x in self.exclude_list.compiled_files]
+        paths_re = [x.pattern for x in self.exclude_list.compiled_paths]
+        assert regex in compiled_re
+        assert regex not in files_re
+        assert regex in paths_re
+        self.exclude_list.rename(regex, regex2)
+        compiled_re = [x.pattern for x in self.exclude_list.compiled]
+        files_re = [x.pattern for x in self.exclude_list.compiled_files]
+        paths_re = [x.pattern for x in self.exclude_list.compiled_paths]
+        assert regex not in compiled_re
+        assert regex2 in compiled_re
+        assert regex2 in files_re
+        assert regex2 not in paths_re
+
+    def test_restore_default(self):
+        """Only unmark previously added regexes and mark the pre-defined ones"""
+        regex = r"one"
+        self.exclude_list.add(regex)
+        self.exclude_list.mark(regex)
+        self.exclude_list.restore_defaults()
+        eq_(len(default_regexes), self.exclude_list.marked_count)
+        # added regex shouldn't be marked
+        eq_(self.exclude_list.is_marked(regex), False)
+        # added regex shouldn't be in compiled list either
+        compiled = [x for x in self.exclude_list.compiled]
+        assert regex not in compiled
+        # Need to escape both to get the same strings after compilation
+        compiled_escaped = set([x.encode("unicode-escape").decode() for x in compiled[0].pattern.split("|")])
+        default_escaped = set([x.encode("unicode-escape").decode() for x in default_regexes])
+        assert compiled_escaped == default_escaped
+        eq_(len(default_regexes), len(compiled[0].pattern.split("|")))
+
+
 def split_union(pattern_object):
     """Returns list of strings for each union pattern"""
     return [x for x in pattern_object.pattern.split("|")]
 
 
-class TestCaseCompiledList():
+class TestCaseCompiledList:
     """Test consistency between union or and separate versions."""
 
     def setup_method(self, method):
         self.e_separate = ExcludeList(union_regex=False)
         self.e_separate.restore_defaults()
@@ -275,6 +427,7 @@ class TestCaseCompiledList():
 
 class TestCaseCompiledDict(TestCaseCompiledList):
     """Test the dictionary version"""
 
     def setup_method(self, method):
         self.e_separate = ExcludeDict(union_regex=False)
         self.e_separate.restore_defaults()

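The new union_regex variants are a performance tweak: rather than testing each path against N compiled patterns, the marked patterns are joined with "|" and compiled once, so exclusion costs a single match call per path. A minimal sketch of the idea (hypothetical names; the real classes are ExcludeList and ExcludeDict):

import re

def compile_exclusions(patterns, union=True):
    if union:
        # One alternation, one compiled object, one match call per path.
        return [re.compile("|".join(patterns))]
    return [re.compile(p) for p in patterns]

def is_excluded(path, compiled):
    return any(r.fullmatch(path) for r in compiled)
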
@@ -7,6 +7,7 @@
 # http://www.gnu.org/licenses/gpl-3.0.html
 
 import hashlib
+from os import urandom
 
 from hscommon.path import Path
 from hscommon.testutil import eq_
@@ -15,6 +16,36 @@ from core.tests.directories_test import create_fake_fs
 from .. import fs
 
 
+def create_fake_fs_with_random_data(rootpath):
+    rootpath = rootpath["fs"]
+    rootpath.mkdir()
+    rootpath["dir1"].mkdir()
+    rootpath["dir2"].mkdir()
+    rootpath["dir3"].mkdir()
+    fp = rootpath["file1.test"].open("wb")
+    data1 = urandom(200 * 1024)  # 200KiB
+    data2 = urandom(1024 * 1024)  # 1MiB
+    data3 = urandom(10 * 1024 * 1024)  # 10MiB
+    fp.write(data1)
+    fp.close()
+    fp = rootpath["file2.test"].open("wb")
+    fp.write(data2)
+    fp.close()
+    fp = rootpath["file3.test"].open("wb")
+    fp.write(data3)
+    fp.close()
+    fp = rootpath["dir1"]["file1.test"].open("wb")
+    fp.write(data1)
+    fp.close()
+    fp = rootpath["dir2"]["file2.test"].open("wb")
+    fp.write(data2)
+    fp.close()
+    fp = rootpath["dir3"]["file3.test"].open("wb")
+    fp.write(data3)
+    fp.close()
+    return rootpath
+
+
 def test_size_aggregates_subfiles(tmpdir):
     p = create_fake_fs(Path(str(tmpdir)))
     b = fs.Folder(p)
@@ -25,7 +56,7 @@ def test_md5_aggregate_subfiles_sorted(tmpdir):
     # dir.allfiles can return child in any order. Thus, bundle.md5 must aggregate
     # all files' md5 it contains, but it must make sure that it does so in the
     # same order everytime.
-    p = create_fake_fs(Path(str(tmpdir)))
+    p = create_fake_fs_with_random_data(Path(str(tmpdir)))
     b = fs.Folder(p)
     md51 = fs.File(p["dir1"]["file1.test"]).md5
     md52 = fs.File(p["dir2"]["file2.test"]).md5
@@ -41,6 +72,36 @@ def test_md5_aggregate_subfiles_sorted(tmpdir):
     eq_(b.md5, md5.digest())
 
 
+def test_partial_md5_aggregate_subfile_sorted(tmpdir):
+    p = create_fake_fs_with_random_data(Path(str(tmpdir)))
+    b = fs.Folder(p)
+    md51 = fs.File(p["dir1"]["file1.test"]).md5partial
+    md52 = fs.File(p["dir2"]["file2.test"]).md5partial
+    md53 = fs.File(p["dir3"]["file3.test"]).md5partial
+    md54 = fs.File(p["file1.test"]).md5partial
+    md55 = fs.File(p["file2.test"]).md5partial
+    md56 = fs.File(p["file3.test"]).md5partial
+    # The expected md5 is the md5 of md5s for folders and the direct md5 for files
+    folder_md51 = hashlib.md5(md51).digest()
+    folder_md52 = hashlib.md5(md52).digest()
+    folder_md53 = hashlib.md5(md53).digest()
+    md5 = hashlib.md5(folder_md51 + folder_md52 + folder_md53 + md54 + md55 + md56)
+    eq_(b.md5partial, md5.digest())
+
+    md51 = fs.File(p["dir1"]["file1.test"]).md5samples
+    md52 = fs.File(p["dir2"]["file2.test"]).md5samples
+    md53 = fs.File(p["dir3"]["file3.test"]).md5samples
+    md54 = fs.File(p["file1.test"]).md5samples
+    md55 = fs.File(p["file2.test"]).md5samples
+    md56 = fs.File(p["file3.test"]).md5samples
+    # The expected md5 is the md5 of md5s for folders and the direct md5 for files
+    folder_md51 = hashlib.md5(md51).digest()
+    folder_md52 = hashlib.md5(md52).digest()
+    folder_md53 = hashlib.md5(md53).digest()
+    md5 = hashlib.md5(folder_md51 + folder_md52 + folder_md53 + md54 + md55 + md56)
+    eq_(b.md5samples, md5.digest())
+
+
 def test_has_file_attrs(tmpdir):
     # a Folder must behave like a file, so it must have mtime attributes
     b = fs.Folder(Path(str(tmpdir)))

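Both aggregate tests assert the same rule: a folder's hash digests its children's hashes in a stable, sorted order, with subfolders contributing md5(child_hash) and plain files contributing their hash bytes directly, so the result doesn't depend on directory enumeration order. A sketch of that rule (inferred from the tests; the real logic lives in fs.Folder):

import hashlib

def folder_digest(children):
    # children: (name, is_folder, hash_bytes) tuples; sorting by name
    # makes the aggregate digest independent of listing order.
    md5 = hashlib.md5()
    for _name, is_folder, child_hash in sorted(children):
        if is_folder:
            md5.update(hashlib.md5(child_hash).digest())
        else:
            md5.update(child_hash)
    return md5.digest()
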
@@ -16,54 +16,54 @@ from ..ignore import IgnoreList
 def test_empty():
     il = IgnoreList()
     eq_(0, len(il))
-    assert not il.AreIgnored("foo", "bar")
+    assert not il.are_ignored("foo", "bar")
 
 
 def test_simple():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    assert il.AreIgnored("foo", "bar")
-    assert il.AreIgnored("bar", "foo")
-    assert not il.AreIgnored("foo", "bleh")
-    assert not il.AreIgnored("bleh", "bar")
+    il.ignore("foo", "bar")
+    assert il.are_ignored("foo", "bar")
+    assert il.are_ignored("bar", "foo")
+    assert not il.are_ignored("foo", "bleh")
+    assert not il.are_ignored("bleh", "bar")
     eq_(1, len(il))
 
 
 def test_multiple():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("foo", "bleh")
-    il.Ignore("bleh", "bar")
-    il.Ignore("aybabtu", "bleh")
-    assert il.AreIgnored("foo", "bar")
-    assert il.AreIgnored("bar", "foo")
-    assert il.AreIgnored("foo", "bleh")
-    assert il.AreIgnored("bleh", "bar")
-    assert not il.AreIgnored("aybabtu", "bar")
+    il.ignore("foo", "bar")
+    il.ignore("foo", "bleh")
+    il.ignore("bleh", "bar")
+    il.ignore("aybabtu", "bleh")
+    assert il.are_ignored("foo", "bar")
+    assert il.are_ignored("bar", "foo")
+    assert il.are_ignored("foo", "bleh")
+    assert il.are_ignored("bleh", "bar")
+    assert not il.are_ignored("aybabtu", "bar")
     eq_(4, len(il))
 
 
 def test_clear():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Clear()
-    assert not il.AreIgnored("foo", "bar")
-    assert not il.AreIgnored("bar", "foo")
+    il.ignore("foo", "bar")
+    il.clear()
+    assert not il.are_ignored("foo", "bar")
+    assert not il.are_ignored("bar", "foo")
     eq_(0, len(il))
 
 
 def test_add_same_twice():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("bar", "foo")
+    il.ignore("foo", "bar")
+    il.ignore("bar", "foo")
     eq_(1, len(il))
 
 
 def test_save_to_xml():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("foo", "bleh")
-    il.Ignore("bleh", "bar")
+    il.ignore("foo", "bar")
+    il.ignore("foo", "bleh")
+    il.ignore("bleh", "bar")
     f = io.BytesIO()
     il.save_to_xml(f)
     f.seek(0)
@@ -73,50 +73,46 @@ def test_save_to_xml():
     eq_(len(root), 2)
     eq_(len([c for c in root if c.tag == "file"]), 2)
     f1, f2 = root[:]
-    subchildren = [c for c in f1 if c.tag == "file"] + [
-        c for c in f2 if c.tag == "file"
-    ]
+    subchildren = [c for c in f1 if c.tag == "file"] + [c for c in f2 if c.tag == "file"]
     eq_(len(subchildren), 3)
 
 
-def test_SaveThenLoad():
+def test_save_then_load():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("foo", "bleh")
-    il.Ignore("bleh", "bar")
-    il.Ignore("\u00e9", "bar")
+    il.ignore("foo", "bar")
+    il.ignore("foo", "bleh")
+    il.ignore("bleh", "bar")
+    il.ignore("\u00e9", "bar")
     f = io.BytesIO()
     il.save_to_xml(f)
     f.seek(0)
     il = IgnoreList()
     il.load_from_xml(f)
     eq_(4, len(il))
-    assert il.AreIgnored("\u00e9", "bar")
+    assert il.are_ignored("\u00e9", "bar")
 
 
-def test_LoadXML_with_empty_file_tags():
+def test_load_xml_with_empty_file_tags():
     f = io.BytesIO()
-    f.write(
-        b'<?xml version="1.0" encoding="utf-8"?><ignore_list><file><file/></file></ignore_list>'
-    )
+    f.write(b'<?xml version="1.0" encoding="utf-8"?><ignore_list><file><file/></file></ignore_list>')
     f.seek(0)
     il = IgnoreList()
     il.load_from_xml(f)
     eq_(0, len(il))
 
 
-def test_AreIgnore_works_when_a_child_is_a_key_somewhere_else():
+def test_are_ignore_works_when_a_child_is_a_key_somewhere_else():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("bar", "baz")
-    assert il.AreIgnored("bar", "foo")
+    il.ignore("foo", "bar")
+    il.ignore("bar", "baz")
+    assert il.are_ignored("bar", "foo")
 
 
 def test_no_dupes_when_a_child_is_a_key_somewhere_else():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("bar", "baz")
-    il.Ignore("bar", "foo")
+    il.ignore("foo", "bar")
+    il.ignore("bar", "baz")
+    il.ignore("bar", "foo")
     eq_(2, len(il))
 
 
@@ -125,7 +121,7 @@ def test_iterate():
     il = IgnoreList()
     expected = [("foo", "bar"), ("bar", "baz"), ("foo", "baz")]
     for i in expected:
-        il.Ignore(i[0], i[1])
+        il.ignore(i[0], i[1])
     for i in il:
         expected.remove(i)  # No exception should be raised
     assert not expected  # expected should be empty
@@ -133,18 +129,18 @@ def test_iterate():
 
 def test_filter():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("bar", "baz")
-    il.Ignore("foo", "baz")
-    il.Filter(lambda f, s: f == "bar")
+    il.ignore("foo", "bar")
+    il.ignore("bar", "baz")
+    il.ignore("foo", "baz")
+    il.filter(lambda f, s: f == "bar")
     eq_(1, len(il))
-    assert not il.AreIgnored("foo", "bar")
-    assert il.AreIgnored("bar", "baz")
+    assert not il.are_ignored("foo", "bar")
+    assert il.are_ignored("bar", "baz")
 
 
 def test_save_with_non_ascii_items():
     il = IgnoreList()
-    il.Ignore("\xac", "\xbf")
+    il.ignore("\xac", "\xbf")
     f = io.BytesIO()
     try:
         il.save_to_xml(f)
@@ -155,29 +151,29 @@ def test_save_with_non_ascii_items():
 def test_len():
     il = IgnoreList()
     eq_(0, len(il))
-    il.Ignore("foo", "bar")
+    il.ignore("foo", "bar")
     eq_(1, len(il))
 
 
 def test_nonzero():
     il = IgnoreList()
     assert not il
-    il.Ignore("foo", "bar")
+    il.ignore("foo", "bar")
     assert il
 
 
 def test_remove():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("foo", "baz")
+    il.ignore("foo", "bar")
+    il.ignore("foo", "baz")
     il.remove("bar", "foo")
     eq_(len(il), 1)
-    assert not il.AreIgnored("foo", "bar")
+    assert not il.are_ignored("foo", "bar")
 
 
 def test_remove_non_existant():
     il = IgnoreList()
-    il.Ignore("foo", "bar")
-    il.Ignore("foo", "baz")
+    il.ignore("foo", "bar")
+    il.ignore("foo", "baz")
     with raises(ValueError):
         il.remove("foo", "bleh")

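The Ignore/AreIgnored to ignore/are_ignored rename is pure PEP 8, but the tests also pin the structure's key property: pairs are unordered and deduplicated, so ignore("foo", "bar") and ignore("bar", "foo") are one entry. A minimal illustration of such a container (illustrative only, not the real IgnoreList):

class PairSet:
    """Stores unordered string pairs: (a, b) and (b, a) are the same entry."""

    def __init__(self):
        self._pairs = set()

    def ignore(self, first, second):
        self._pairs.add(frozenset((first, second)))

    def are_ignored(self, first, second):
        return frozenset((first, second)) in self._pairs

    def __len__(self):
        return len(self._pairs)
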
@@ -117,9 +117,7 @@ class TestCaseResultsWithSomeGroups:
         assert d is g.ref
 
     def test_sort_groups(self):
-        self.results.make_ref(
-            self.objects[1]
-        )  # We want to make the 1024 sized object to go ref.
+        self.results.make_ref(self.objects[1])  # We want to make the 1024 sized object to go ref.
         g1, g2 = self.groups
         self.results.sort_groups("size")
         assert self.results.groups[0] is g2
@@ -129,9 +127,7 @@ class TestCaseResultsWithSomeGroups:
         assert self.results.groups[1] is g2
 
     def test_set_groups_when_sorted(self):
-        self.results.make_ref(
-            self.objects[1]
-        )  # We want to make the 1024 sized object to go ref.
+        self.results.make_ref(self.objects[1])  # We want to make the 1024 sized object to go ref.
         self.results.sort_groups("size")
         objects, matches, groups = GetTestGroups()
         g1, g2 = groups
@@ -406,7 +402,7 @@ class TestCaseResultsMarkings:
         self.results.make_ref(d)
         eq_("0 / 3 (0.00 B / 3.00 B) duplicates marked.", self.results.stat_line)
 
-    def test_SaveXML(self):
+    def test_save_xml(self):
         self.results.mark(self.objects[1])
         self.results.mark_invert()
         f = io.BytesIO()
@@ -423,7 +419,7 @@ class TestCaseResultsMarkings:
         eq_("n", d1.get("marked"))
         eq_("y", d2.get("marked"))
 
-    def test_LoadXML(self):
+    def test_load_xml(self):
         def get_file(path):
             return [f for f in self.objects if str(f.path) == path][0]
 
@@ -489,7 +485,7 @@ class TestCaseResultsXML:
         eq_("ibabtu", d1.get("words"))
         eq_("ibabtu", d2.get("words"))
 
-    def test_LoadXML(self):
+    def test_load_xml(self):
         def get_file(path):
             return [f for f in self.objects if str(f.path) == path][0]
 
@@ -521,7 +517,7 @@ class TestCaseResultsXML:
         eq_(["ibabtu"], g2[0].words)
         eq_(["ibabtu"], g2[1].words)
 
-    def test_LoadXML_with_filename(self, tmpdir):
+    def test_load_xml_with_filename(self, tmpdir):
         def get_file(path):
             return [f for f in self.objects if str(f.path) == path][0]
 
@@ -533,7 +529,7 @@ class TestCaseResultsXML:
         r.load_from_xml(filename, get_file)
         eq_(2, len(r.groups))
 
-    def test_LoadXML_with_some_files_that_dont_exist_anymore(self):
+    def test_load_xml_with_some_files_that_dont_exist_anymore(self):
         def get_file(path):
             if path.endswith("ibabtu 2"):
                 return None
@@ -549,7 +545,7 @@ class TestCaseResultsXML:
         eq_(1, len(r.groups))
         eq_(3, len(r.groups[0]))
 
-    def test_LoadXML_missing_attributes_and_bogus_elements(self):
+    def test_load_xml_missing_attributes_and_bogus_elements(self):
         def get_file(path):
             return [f for f in self.objects if str(f.path) == path][0]
 
@@ -601,9 +597,7 @@ class TestCaseResultsXML:
         matches = engine.getmatches(objects)  # we should have 5 matches
         groups = engine.get_groups(matches)  # We should have 2 groups
         for g in groups:
-            g.prioritize(
-                lambda x: objects.index(x)
-            )  # We want the dupes to be in the same order as the list is
+            g.prioritize(lambda x: objects.index(x))  # We want the dupes to be in the same order as the list is
         app = DupeGuru()
         results = Results(app)
         results.groups = groups
@@ -807,9 +801,7 @@ class TestCaseResultsFilter:
         # Now the stats should display *2* markable dupes (instead of 1)
         expected = "0 / 2 (0.00 B / 2.00 B) duplicates marked. filter: foo"
         eq_(expected, self.results.stat_line)
-        self.results.apply_filter(
-            None
-        )  # Now let's make sure our unfiltered results aren't fucked up
+        self.results.apply_filter(None)  # Now let's make sure our unfiltered results aren't fucked up
         expected = "0 / 3 (0.00 B / 3.00 B) duplicates marked."
         eq_(expected, self.results.stat_line)
 

@@ -52,10 +52,11 @@ def test_empty(fake_fileexists):
 def test_default_settings(fake_fileexists):
     s = Scanner()
     eq_(s.min_match_percentage, 80)
-    eq_(s.scan_type, ScanType.Filename)
+    eq_(s.scan_type, ScanType.FILENAME)
     eq_(s.mix_file_kind, True)
     eq_(s.word_weighting, False)
     eq_(s.match_similar_words, False)
+    eq_(s.big_file_size_threshold, 0)
 
 
 def test_simple_with_default_settings(fake_fileexists):
@@ -97,7 +98,7 @@ def test_trim_all_ref_groups(fake_fileexists):
     eq_(s.discarded_file_count, 0)
 
 
-def test_priorize(fake_fileexists):
+def test_prioritize(fake_fileexists):
     s = Scanner()
     f = [
         no("foo", path="p1"),
@@ -118,11 +119,11 @@ def test_priorize(fake_fileexists):
 
 def test_content_scan(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Contents
+    s.scan_type = ScanType.CONTENTS
     f = [no("foo"), no("bar"), no("bleh")]
-    f[0].md5 = f[0].md5partial = "foobar"
-    f[1].md5 = f[1].md5partial = "foobar"
-    f[2].md5 = f[2].md5partial = "bleh"
+    f[0].md5 = f[0].md5partial = f[0].md5samples = "foobar"
+    f[1].md5 = f[1].md5partial = f[1].md5samples = "foobar"
+    f[2].md5 = f[2].md5partial = f[2].md5samples = "bleh"
     r = s.get_dupe_groups(f)
     eq_(len(r), 1)
     eq_(len(r[0]), 2)
@@ -132,22 +133,51 @@ def test_content_scan(fake_fileexists):
 def test_content_scan_compare_sizes_first(fake_fileexists):
     class MyFile(no):
         @property
-        def md5(file):
+        def md5(self):
             raise AssertionError()
 
     s = Scanner()
-    s.scan_type = ScanType.Contents
+    s.scan_type = ScanType.CONTENTS
     f = [MyFile("foo", 1), MyFile("bar", 2)]
     eq_(len(s.get_dupe_groups(f)), 0)
 
 
+def test_big_file_partial_hashes(fake_fileexists):
+    s = Scanner()
+    s.scan_type = ScanType.CONTENTS
+
+    smallsize = 1
+    bigsize = 100 * 1024 * 1024  # 100MB
+    s.big_file_size_threshold = bigsize
+
+    f = [no("bigfoo", bigsize), no("bigbar", bigsize), no("smallfoo", smallsize), no("smallbar", smallsize)]
+    f[0].md5 = f[0].md5partial = f[0].md5samples = "foobar"
+    f[1].md5 = f[1].md5partial = f[1].md5samples = "foobar"
+    f[2].md5 = f[2].md5partial = "bleh"
+    f[3].md5 = f[3].md5partial = "bleh"
+    r = s.get_dupe_groups(f)
+    eq_(len(r), 2)
+
+    # md5partial is still the same, but the file is actually different
+    f[1].md5 = f[1].md5samples = "difffoobar"
+    # here we compare the full md5s, as the user disabled the optimization
+    s.big_file_size_threshold = 0
+    r = s.get_dupe_groups(f)
+    eq_(len(r), 1)
+
+    # here we should compare the md5samples, and see they are different
+    s.big_file_size_threshold = bigsize
+    r = s.get_dupe_groups(f)
+    eq_(len(r), 1)
+
+
 def test_min_match_perc_doesnt_matter_for_content_scan(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Contents
+    s.scan_type = ScanType.CONTENTS
     f = [no("foo"), no("bar"), no("bleh")]
-    f[0].md5 = f[0].md5partial = "foobar"
-    f[1].md5 = f[1].md5partial = "foobar"
-    f[2].md5 = f[2].md5partial = "bleh"
+    f[0].md5 = f[0].md5partial = f[0].md5samples = "foobar"
+    f[1].md5 = f[1].md5partial = f[1].md5samples = "foobar"
+    f[2].md5 = f[2].md5partial = f[2].md5samples = "bleh"
     s.min_match_percentage = 101
     r = s.get_dupe_groups(f)
     eq_(len(r), 1)
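
(Note: test_content_scan's new line read "f[1].md5samples" where the pattern clearly intends "f[2].md5samples"; it is corrected above.) The Scanner side of the feature is one new knob, big_file_size_threshold, defaulting to 0 (off). A hedged usage sketch, with the import path assumed rather than taken from this diff:

from core.scanner import Scanner, ScanType  # import path assumed

s = Scanner()
s.scan_type = ScanType.CONTENTS
# 0 (the default) always hashes whole files; a positive value makes files
# above the threshold compare by md5samples instead of the full md5.
s.big_file_size_threshold = 100 * 1024 * 1024
groups = s.get_dupe_groups(files)  # `files` would come from a directory scan
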
@@ -160,15 +190,12 @@ def test_min_match_perc_doesnt_matter_for_content_scan(fake_fileexists):
 
 def test_content_scan_doesnt_put_md5_in_words_at_the_end(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Contents
+    s.scan_type = ScanType.CONTENTS
     f = [no("foo"), no("bar")]
-    f[0].md5 = f[
-        0
-    ].md5partial = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
-    f[1].md5 = f[
-        1
-    ].md5partial = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+    f[0].md5 = f[0].md5partial = f[0].md5samples = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
+    f[1].md5 = f[1].md5partial = f[1].md5samples = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
     r = s.get_dupe_groups(f)
+    # FIXME looks like we are missing something here?
     r[0]
 
 
@@ -229,7 +256,7 @@ def test_similar_words(fake_fileexists):
 
 def test_fields(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Fields
+    s.scan_type = ScanType.FIELDS
     f = [no("The White Stripes - Little Ghost"), no("The White Stripes - Little Acorn")]
     r = s.get_dupe_groups(f)
     eq_(len(r), 0)
@@ -237,7 +264,7 @@ def test_fields(fake_fileexists):
 
 def test_fields_no_order(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.FieldsNoOrder
+    s.scan_type = ScanType.FIELDSNOORDER
     f = [no("The White Stripes - Little Ghost"), no("Little Ghost - The White Stripes")]
     r = s.get_dupe_groups(f)
     eq_(len(r), 1)
@@ -245,7 +272,7 @@ def test_fields_no_order(fake_fileexists):
 
 def test_tag_scan(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Tag
+    s.scan_type = ScanType.TAG
     o1 = no("foo")
     o2 = no("bar")
     o1.artist = "The White Stripes"
@@ -258,7 +285,7 @@ def test_tag_scan(fake_fileexists):
 
 def test_tag_with_album_scan(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Tag
+    s.scan_type = ScanType.TAG
     s.scanned_tags = set(["artist", "album", "title"])
     o1 = no("foo")
     o2 = no("bar")
@@ -278,7 +305,7 @@ def test_tag_with_album_scan(fake_fileexists):
 
 def test_that_dash_in_tags_dont_create_new_fields(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Tag
+    s.scan_type = ScanType.TAG
     s.scanned_tags = set(["artist", "album", "title"])
     s.min_match_percentage = 50
     o1 = no("foo")
@@ -295,7 +322,7 @@ def test_that_dash_in_tags_dont_create_new_fields(fake_fileexists):
 
 def test_tag_scan_with_different_scanned(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Tag
+    s.scan_type = ScanType.TAG
     s.scanned_tags = set(["track", "year"])
     o1 = no("foo")
     o2 = no("bar")
@@ -313,7 +340,7 @@ def test_tag_scan_with_different_scanned(fake_fileexists):
 
 def test_tag_scan_only_scans_existing_tags(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Tag
+    s.scan_type = ScanType.TAG
     s.scanned_tags = set(["artist", "foo"])
     o1 = no("foo")
     o2 = no("bar")
@@ -327,7 +354,7 @@ def test_tag_scan_only_scans_existing_tags(fake_fileexists):
 
 def test_tag_scan_converts_to_str(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Tag
+    s.scan_type = ScanType.TAG
     s.scanned_tags = set(["track"])
     o1 = no("foo")
     o2 = no("bar")
@@ -342,7 +369,7 @@ def test_tag_scan_converts_to_str(fake_fileexists):
 
 def test_tag_scan_non_ascii(fake_fileexists):
     s = Scanner()
-    s.scan_type = ScanType.Tag
+    s.scan_type = ScanType.TAG
     s.scanned_tags = set(["title"])
     o1 = no("foo")
     o2 = no("bar")
@@ -364,8 +391,8 @@ def test_ignore_list(fake_fileexists):
     f2.path = Path("dir2/foobar")
     f3.path = Path("dir3/foobar")
     ignore_list = IgnoreList()
-    ignore_list.Ignore(str(f1.path), str(f2.path))
-    ignore_list.Ignore(str(f1.path), str(f3.path))
+    ignore_list.ignore(str(f1.path), str(f2.path))
+    ignore_list.ignore(str(f1.path), str(f3.path))
     r = s.get_dupe_groups([f1, f2, f3], ignore_list=ignore_list)
     eq_(len(r), 1)
     g = r[0]
@@ -388,8 +415,8 @@ def test_ignore_list_checks_for_unicode(fake_fileexists):
     f2.path = Path("foo2\u00e9")
     f3.path = Path("foo3\u00e9")
     ignore_list = IgnoreList()
-    ignore_list.Ignore(str(f1.path), str(f2.path))
-    ignore_list.Ignore(str(f1.path), str(f3.path))
+    ignore_list.ignore(str(f1.path), str(f2.path))
+    ignore_list.ignore(str(f1.path), str(f3.path))
     r = s.get_dupe_groups([f1, f2, f3], ignore_list=ignore_list)
     eq_(len(r), 1)
     g = r[0]
@@ -493,7 +520,7 @@ def test_dont_group_files_that_dont_exist(tmpdir):
     # In this test, we have to delete one of the files between the get_matches() part and the
     # get_groups() part.
     s = Scanner()
-    s.scan_type = ScanType.Contents
+    s.scan_type = ScanType.CONTENTS
     p = Path(str(tmpdir))
     p["file1"].open("w").write("foo")
     p["file2"].open("w").write("foo")
@@ -512,23 +539,23 @@ def test_folder_scan_exclude_subfolder_matches(fake_fileexists):
     # when doing a Folders scan type, don't include matches for folders whose parent folder already
     # match.
     s = Scanner()
-    s.scan_type = ScanType.Folders
+    s.scan_type = ScanType.FOLDERS
     topf1 = no("top folder 1", size=42)
-    topf1.md5 = topf1.md5partial = b"some_md5_1"
+    topf1.md5 = topf1.md5partial = topf1.md5samples = b"some_md5_1"
     topf1.path = Path("/topf1")
     topf2 = no("top folder 2", size=42)
-    topf2.md5 = topf2.md5partial = b"some_md5_1"
+    topf2.md5 = topf2.md5partial = topf2.md5samples = b"some_md5_1"
     topf2.path = Path("/topf2")
     subf1 = no("sub folder 1", size=41)
-    subf1.md5 = subf1.md5partial = b"some_md5_2"
+    subf1.md5 = subf1.md5partial = subf1.md5samples = b"some_md5_2"
     subf1.path = Path("/topf1/sub")
     subf2 = no("sub folder 2", size=41)
-    subf2.md5 = subf2.md5partial = b"some_md5_2"
+    subf2.md5 = subf2.md5partial = subf2.md5samples = b"some_md5_2"
     subf2.path = Path("/topf2/sub")
     eq_(len(s.get_dupe_groups([topf1, topf2, subf1, subf2])), 1)  # only top folders
     # however, if another folder matches a subfolder, keep it in the matches
     otherf = no("other folder", size=41)
-    otherf.md5 = otherf.md5partial = b"some_md5_2"
+    otherf.md5 = otherf.md5partial = otherf.md5samples = b"some_md5_2"
     otherf.path = Path("/otherfolder")
     eq_(len(s.get_dupe_groups([topf1, topf2, subf1, subf2, otherf])), 2)
 
@@ -547,21 +574,21 @@ def test_dont_count_ref_files_as_discarded(fake_fileexists):
     # However, this causes problems in "discarded" counting and we make sure here that we don't
     # report discarded matches in exact duplicate scans.
     s = Scanner()
-    s.scan_type = ScanType.Contents
+    s.scan_type = ScanType.CONTENTS
     o1 = no("foo", path="p1")
     o2 = no("foo", path="p2")
     o3 = no("foo", path="p3")
-    o1.md5 = o1.md5partial = "foobar"
-    o2.md5 = o2.md5partial = "foobar"
-    o3.md5 = o3.md5partial = "foobar"
+    o1.md5 = o1.md5partial = o1.md5samples = "foobar"
+    o2.md5 = o2.md5partial = o2.md5samples = "foobar"
+    o3.md5 = o3.md5partial = o3.md5samples = "foobar"
     o1.is_ref = True
     o2.is_ref = True
     eq_(len(s.get_dupe_groups([o1, o2, o3])), 1)
     eq_(s.discarded_file_count, 0)
 
 
-def test_priorize_me(fake_fileexists):
-    # in ScannerME, bitrate goes first (right after is_ref) in priorization
+def test_prioritize_me(fake_fileexists):
+    # in ScannerME, bitrate goes first (right after is_ref) in prioritization
     s = ScannerME()
     o1, o2 = no("foo", path="p1"), no("foo", path="p2")
     o1.bitrate = 1

@@ -5,6 +5,8 @@
 # http://www.gnu.org/licenses/gpl-3.0.html
 
 import time
+import sys
+import os
 
 from hscommon.util import format_time_decimal
 
@@ -58,3 +60,7 @@ def fix_surrogate_encoding(s, encoding="utf-8"):
         return s.encode(encoding, "replace").decode(encoding)
     else:
         return s
+
+
+def executable_folder():
+    return os.path.dirname(os.path.abspath(sys.argv[0]))
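The new executable_folder helper resolves the directory of the entry-point script (or the frozen binary when packaged), independent of the current working directory; the usual purpose is locating resources shipped next to the executable. A small self-contained usage sketch (the settings filename is made up):

import os
import sys

def executable_folder():
    # Same body as the helper added above.
    return os.path.dirname(os.path.abspath(sys.argv[0]))

# e.g. find a config file shipped next to the executable:
settings_path = os.path.join(executable_folder(), "settings.ini")
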
@@ -30,8 +30,7 @@ from .util import ensure_folder, delete_files_with_pattern
|
|||||||
|
|
||||||
|
|
||||||
def print_and_do(cmd):
|
def print_and_do(cmd):
|
||||||
"""Prints ``cmd`` and executes it in the shell.
|
"""Prints ``cmd`` and executes it in the shell."""
|
||||||
"""
|
|
||||||
print(cmd)
|
print(cmd)
|
||||||
p = Popen(cmd, shell=True)
|
p = Popen(cmd, shell=True)
|
||||||
return p.wait()
|
return p.wait()
|
||||||
@@ -91,16 +90,14 @@ def copy_all(pattern, dst):
|
|||||||
|
|
||||||
|
|
||||||
def ensure_empty_folder(path):
|
def ensure_empty_folder(path):
|
||||||
"""Make sure that the path exists and that it's an empty folder.
|
"""Make sure that the path exists and that it's an empty folder."""
|
||||||
"""
|
|
||||||
if op.exists(path):
|
if op.exists(path):
|
||||||
shutil.rmtree(path)
|
shutil.rmtree(path)
|
||||||
os.mkdir(path)
|
os.mkdir(path)
|
||||||
|
|
||||||
|
|
||||||
def filereplace(filename, outfilename=None, **kwargs):
|
def filereplace(filename, outfilename=None, **kwargs):
|
||||||
"""Reads `filename`, replaces all {variables} in kwargs, and writes the result to `outfilename`.
|
"""Reads `filename`, replaces all {variables} in kwargs, and writes the result to `outfilename`."""
|
||||||
"""
|
|
||||||
if outfilename is None:
|
if outfilename is None:
|
||||||
outfilename = filename
|
outfilename = filename
|
||||||
fp = open(filename, "rt", encoding="utf-8")
|
fp = open(filename, "rt", encoding="utf-8")
|
||||||
@@ -152,9 +149,7 @@ def package_cocoa_app_in_dmg(app_path, destfolder, args):
|
|||||||
# a valid signature.
|
# a valid signature.
|
||||||
if args.sign_identity:
|
if args.sign_identity:
|
||||||
sign_identity = "Developer ID Application: {}".format(args.sign_identity)
|
sign_identity = "Developer ID Application: {}".format(args.sign_identity)
|
||||||
result = print_and_do(
|
result = print_and_do('codesign --force --deep --sign "{}" "{}"'.format(sign_identity, app_path))
|
||||||
'codesign --force --deep --sign "{}" "{}"'.format(sign_identity, app_path)
|
|
||||||
)
|
|
||||||
if result != 0:
|
if result != 0:
|
||||||
print("ERROR: Signing failed. Aborting packaging.")
|
print("ERROR: Signing failed. Aborting packaging.")
|
||||||
return
|
return
|
||||||
@@ -182,10 +177,7 @@ def build_dmg(app_path, destfolder):
     )
     print("Building %s" % dmgname)
     # UDBZ = bzip compression. UDZO (zip compression) was used before, but it compresses much less.
-    print_and_do(
-        'hdiutil create "%s" -format UDBZ -nocrossdev -srcdir "%s"'
-        % (op.join(destfolder, dmgname), dmgpath)
-    )
+    print_and_do('hdiutil create "%s" -format UDBZ -nocrossdev -srcdir "%s"' % (op.join(destfolder, dmgname), dmgpath))
     print("Build Complete")


@@ -207,8 +199,7 @@ sysconfig.get_config_h_filename = lambda: op.join(op.dirname(__file__), 'pyconfi


 def add_to_pythonpath(path):
-    """Adds ``path`` to both ``PYTHONPATH`` env and ``sys.path``.
-    """
+    """Adds ``path`` to both ``PYTHONPATH`` env and ``sys.path``."""
     abspath = op.abspath(path)
     pythonpath = os.environ.get("PYTHONPATH", "")
     pathsep = ";" if ISWINDOWS else ":"
@@ -231,9 +222,7 @@ def copy_packages(packages_names, dest, create_links=False, extra_ignores=None):
         create_links = False
     if not extra_ignores:
         extra_ignores = []
-    ignore = shutil.ignore_patterns(
-        ".hg*", "tests", "testdata", "modules", "docs", "locale", *extra_ignores
-    )
+    ignore = shutil.ignore_patterns(".hg*", "tests", "testdata", "modules", "docs", "locale", *extra_ignores)
     for package_name in packages_names:
         if op.exists(package_name):
             source_path = package_name
@@ -347,7 +336,6 @@ def read_changelog_file(filename):
     with open(filename, "rt", encoding="utf-8") as fp:
         contents = fp.read()
     splitted = re_changelog_header.split(contents)[1:]  # the first item is empty
-    # splitted = [version1, date1, desc1, version2, date2, ...]
     result = []
     for version, date_str, description in iter_by_three(iter(splitted)):
         date = datetime.strptime(date_str, "%Y-%m-%d").date()
@@ -410,8 +398,8 @@ def create_osx_app_structure(
     # `resources`: A list of paths of files or folders going in the "Resources" folder.
     # `frameworks`: Same as above for "Frameworks".
     # `symlink_resources`: If True, will symlink resources into the structure instead of copying them.
-    app = OSXAppStructure(dest, infoplist)
-    app.create()
+    app = OSXAppStructure(dest)
+    app.create(infoplist)
     app.copy_executable(executable)
     app.copy_resources(*resources, use_symlinks=symlink_resources)
     app.copy_frameworks(*frameworks)
@@ -444,11 +432,10 @@ class OSXFrameworkStructure:

     def create_symlinks(self):
         # Only call this after create() and copy_executable()
-        rel = lambda path: op.relpath(path, self.dest)
         os.symlink("A", op.join(self.dest, "Versions", "Current"))
-        os.symlink(rel(self.executablepath), op.join(self.dest, self.executablename))
-        os.symlink(rel(self.headers), op.join(self.dest, "Headers"))
-        os.symlink(rel(self.resources), op.join(self.dest, "Resources"))
+        os.symlink(op.relpath(self.executablepath, self.dest), op.join(self.dest, self.executablename))
+        os.symlink(op.relpath(self.headers, self.dest), op.join(self.dest, "Headers"))
+        os.symlink(op.relpath(self.resources, self.dest), op.join(self.dest, "Resources"))

     def copy_executable(self, executable):
         copy(executable, self.executablepath)
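Note: the refactor above inlines the small rel() lambda; op.relpath(path, start) returns path expressed relative to start, which is exactly what the framework symlinks need. For example (hypothetical framework layout, for illustration only):

    import os.path as op

    print(op.relpath("/Frameworks/Py.framework/Versions/A/Py", "/Frameworks/Py.framework"))
    # -> Versions/A/Py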
@@ -481,9 +468,7 @@ def copy_embeddable_python_dylib(dst):
 def collect_stdlib_dependencies(script, dest_folder, extra_deps=None):
     sysprefix = sys.prefix  # could be a virtualenv
     basesysprefix = sys.base_prefix  # seems to be path to non-virtual sys
-    real_lib_prefix = sysconfig.get_config_var(
-        "LIBDEST"
-    )  # leaving this in case it is neede
+    real_lib_prefix = sysconfig.get_config_var("LIBDEST")  # leaving this in case it is neede

     def is_stdlib_path(path):
         # A module path is only a stdlib path if it's in either sys.prefix or
@@ -493,11 +478,7 @@ def collect_stdlib_dependencies(script, dest_folder, extra_deps=None):
             return False
         if "site-package" in path:
             return False
-        if not (
-            path.startswith(sysprefix)
-            or path.startswith(basesysprefix)
-            or path.startswith(real_lib_prefix)
-        ):
+        if not (path.startswith(sysprefix) or path.startswith(basesysprefix) or path.startswith(real_lib_prefix)):
             return False
         return True

@@ -511,9 +492,7 @@ def collect_stdlib_dependencies(script, dest_folder, extra_deps=None):
             relpath = op.relpath(p, real_lib_prefix)
         elif p.startswith(sysprefix):
             relpath = op.relpath(p, sysprefix)
-            assert relpath.startswith(
-                "lib/python3."
-            )  # we want to get rid of that lib/python3.x part
+            assert relpath.startswith("lib/python3.")  # we want to get rid of that lib/python3.x part
             relpath = relpath[len("lib/python3.X/") :]
         elif p.startswith(basesysprefix):
             relpath = op.relpath(p, basesysprefix)
@@ -521,9 +500,7 @@ def collect_stdlib_dependencies(script, dest_folder, extra_deps=None):
             relpath = relpath[len("lib/python3.X/") :]
         else:
             raise AssertionError()
-        if relpath.startswith(
-            "lib-dynload"
-        ):  # We copy .so files in lib-dynload directly in our dest
+        if relpath.startswith("lib-dynload"):  # We copy .so files in lib-dynload directly in our dest
             relpath = relpath[len("lib-dynload/") :]
         if relpath.startswith("encodings") or relpath.startswith("distutils"):
             # We force their inclusion later.
@@ -562,9 +539,7 @@ def fix_qt_resource_file(path):
     fp.write(b"\n".join(lines))


-def build_cocoa_ext(
-    extname, dest, source_files, extra_frameworks=(), extra_includes=()
-):
+def build_cocoa_ext(extname, dest, source_files, extra_frameworks=(), extra_includes=()):
     extra_link_args = ["-framework", "CoreFoundation", "-framework", "Foundation"]
     for extra in extra_frameworks:
         extra_link_args += ["-framework", extra]
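Note: is_stdlib_path() above is a prefix test against the interpreter's install locations; a self-contained sketch of the same check, with the prefixes passed in so it can be exercised with made-up paths:

    import sys

    def is_stdlib_path(path, prefixes=(sys.prefix, sys.base_prefix)):
        # Mirrors the logic above: reject empty paths and site-packages,
        # accept anything under a known interpreter prefix.
        if not path:
            return False
        if "site-package" in path:
            return False
        return any(path.startswith(p) for p in prefixes)

    print(is_stdlib_path(sys.prefix + "/lib/python3.10/os.py"))  # True
    print(is_stdlib_path("/tmp/site-packages/foo.py"))           # False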
@@ -11,9 +11,7 @@ from setuptools import setup, Extension

 def get_parser():
     parser = argparse.ArgumentParser(description="Build an arbitrary Python extension.")
-    parser.add_argument(
-        "source_files", nargs="+", help="List of source files to compile"
-    )
+    parser.add_argument("source_files", nargs="+", help="List of source files to compile")
     parser.add_argument("name", nargs=1, help="Name of the resulting extension")
     return parser

@@ -23,7 +21,8 @@ def main():
     print("Building {}...".format(args.name[0]))
     ext = Extension(args.name[0], args.source_files)
     setup(
-        script_args=["build_ext", "--inplace"], ext_modules=[ext],
+        script_args=["build_ext", "--inplace"],
+        ext_modules=[ext],
     )


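Note: the reformatted setup() call is the whole build: passing script_args makes setuptools behave as if "python setup.py build_ext --inplace" had been run. A sketch with a hypothetical extension name and source file:

    from setuptools import setup, Extension

    ext = Extension("_example", ["example.c"])  # hypothetical name/source
    setup(script_args=["build_ext", "--inplace"], ext_modules=[ext])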
@@ -48,15 +48,13 @@ def get_unconflicted_name(name):


 def is_conflicted(name):
-    """Returns whether ``name`` is prepended with a bracketed number.
-    """
+    """Returns whether ``name`` is prepended with a bracketed number."""
     return re_conflict.match(name) is not None


 @pathify
 def _smart_move_or_copy(operation, source_path: Path, dest_path: Path):
-    """Use move() or copy() to move and copy file with the conflict management.
-    """
+    """Use move() or copy() to move and copy file with the conflict management."""
     if dest_path.isdir() and not source_path.isdir():
         dest_path = dest_path[source_path.name]
     if dest_path.exists():
@@ -68,14 +66,12 @@ def _smart_move_or_copy(operation, source_path: Path, dest_path: Path):


 def smart_move(source_path, dest_path):
-    """Same as :func:`smart_copy`, but it moves files instead.
-    """
+    """Same as :func:`smart_copy`, but it moves files instead."""
     _smart_move_or_copy(shutil.move, source_path, dest_path)


 def smart_copy(source_path, dest_path):
-    """Copies ``source_path`` to ``dest_path``, recursively and with conflict resolution.
-    """
+    """Copies ``source_path`` to ``dest_path``, recursively and with conflict resolution."""
     try:
         _smart_move_or_copy(shutil.copy, source_path, dest_path)
     except IOError as e:
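Note: the helpers above implement conflict management: on a name collision the destination is renamed with the bracketed-number scheme that is_conflicted() recognizes. A hedged usage sketch (module path and file names are assumptions; the files would have to exist):

    from hscommon.conflict import is_conflicted, smart_copy

    # If backup/img.jpg already exists, the copy lands as "[000] img.jpg" instead.
    smart_copy("photos/img.jpg", "backup/img.jpg")
    print(is_conflicted("[000] img.jpg"))  # True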
@@ -13,8 +13,8 @@ import traceback
 # Taken from http://bzimmer.ziclix.com/2008/12/17/python-thread-dumps/
 def stacktraces():
     code = []
-    for threadId, stack in sys._current_frames().items():
-        code.append("\n# ThreadID: %s" % threadId)
+    for thread_id, stack in sys._current_frames().items():
+        code.append("\n# ThreadID: %s" % thread_id)
         for filename, lineno, name, line in traceback.extract_stack(stack):
             code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
             if line:
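Note: the hunk above ends mid-loop; a complete sketch of the same thread-dump idea, using only the stdlib calls already visible (the final line-echo step is an assumption):

    import sys
    import traceback

    def stacktraces():
        code = []
        for thread_id, stack in sys._current_frames().items():
            code.append("\n# ThreadID: %s" % thread_id)
            for filename, lineno, name, line in traceback.extract_stack(stack):
                code.append('File: "%s", line %d, in %s' % (filename, lineno, name))
                if line:
                    code.append("  %s" % line.strip())
        return "\n".join(code)

    print(stacktraces())  # dumps a stack for every live thread, including this one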
@@ -11,29 +11,26 @@ import logging


 class SpecialFolder:
-    AppData = 1
-    Cache = 2
+    APPDATA = 1
+    CACHE = 2


 def open_url(url):
-    """Open ``url`` with the default browser.
-    """
+    """Open ``url`` with the default browser."""
     _open_url(url)


 def open_path(path):
-    """Open ``path`` with its associated application.
-    """
+    """Open ``path`` with its associated application."""
     _open_path(str(path))


 def reveal_path(path):
-    """Open the folder containing ``path`` with the default file browser.
-    """
+    """Open the folder containing ``path`` with the default file browser."""
     _reveal_path(str(path))


-def special_folder_path(special_folder, appname=None):
+def special_folder_path(special_folder, appname=None, portable=False):
     """Returns the path of ``special_folder``.

     ``special_folder`` is a SpecialFolder.* const. The result is the special folder for the current
@@ -41,7 +38,7 @@ def special_folder_path(special_folder, appname=None):

     You can override the application name with ``appname``. This argument is ingored under Qt.
     """
-    return _special_folder_path(special_folder, appname)
+    return _special_folder_path(special_folder, appname, portable=portable)


 try:
@@ -57,8 +54,8 @@ try:
     _open_path = proxy.openPath_
     _reveal_path = proxy.revealPath_

-    def _special_folder_path(special_folder, appname=None):
-        if special_folder == SpecialFolder.Cache:
+    def _special_folder_path(special_folder, appname=None, portable=False):
+        if special_folder == SpecialFolder.CACHE:
             base = proxy.getCachePath()
         else:
             base = proxy.getAppdataPath()
@@ -71,6 +68,10 @@ except ImportError:
     try:
         from PyQt5.QtCore import QUrl, QStandardPaths
         from PyQt5.QtGui import QDesktopServices
+        from qtlib.util import get_appdata
+        from core.util import executable_folder
+        from hscommon.plat import ISWINDOWS, ISOSX
+        import subprocess

         def _open_url(url):
             QDesktopServices.openUrl(QUrl(url))
@@ -80,14 +81,22 @@ except ImportError:
             QDesktopServices.openUrl(url)

         def _reveal_path(path):
-            _open_path(op.dirname(str(path)))
+            if ISWINDOWS:
+                subprocess.run(["explorer", "/select,", op.abspath(path)])
+            elif ISOSX:
+                subprocess.run(["open", "-R", op.abspath(path)])
+            else:
+                _open_path(op.dirname(str(path)))

-        def _special_folder_path(special_folder, appname=None):
-            if special_folder == SpecialFolder.Cache:
-                qtfolder = QStandardPaths.CacheLocation
-            else:
-                qtfolder = QStandardPaths.DataLocation
-            return QStandardPaths.standardLocations(qtfolder)[0]
+        def _special_folder_path(special_folder, appname=None, portable=False):
+            if special_folder == SpecialFolder.CACHE:
+                if ISWINDOWS and portable:
+                    folder = op.join(executable_folder(), "cache")
+                else:
+                    folder = QStandardPaths.standardLocations(QStandardPaths.CacheLocation)[0]
+            else:
+                folder = get_appdata(portable)
+            return folder

     except ImportError:
         # We're either running tests, and these functions don't matter much or we're in a really
@@ -95,10 +104,12 @@ except ImportError:
         logging.warning("Can't setup desktop functions!")

         def _open_path(path):
+            # Dummy for tests
             pass

         def _reveal_path(path):
+            # Dummy for tests
             pass

-        def _special_folder_path(special_folder, appname=None):
+        def _special_folder_path(special_folder, appname=None, portable=False):
             return "/tmp"
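Note: the net effect of the desktop changes is that, under Qt on a portable Windows build, the cache lives beside the executable instead of in QStandardPaths' per-user location. A rough sketch of the added branching, with the collaborators passed in so it runs standalone:

    import os.path as op

    def cache_folder_sketch(iswindows, portable, exe_folder, qt_cache_dir):
        # Mirrors the CACHE branch added above.
        if iswindows and portable:
            return op.join(exe_folder, "cache")
        return qt_cache_dir

    print(cache_folder_sketch(True, True, "/apps/dupeguru", None))
    # -> /apps/dupeguru/cache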
@@ -149,8 +149,7 @@ class Rect:
         return l1, l2, l3, l4

     def scaled_rect(self, dx, dy):
-        """Returns a rect that has the same borders at self, but grown/shrunk by dx/dy on each side.
-        """
+        """Returns a rect that has the same borders at self, but grown/shrunk by dx/dy on each side."""
         x, y, w, h = self
         x -= dx
         y -= dy
@@ -159,8 +158,7 @@ class Rect:
         return Rect(x, y, w, h)

     def united(self, other):
-        """Returns the bounding rectangle of this rectangle and `other`.
-        """
+        """Returns the bounding rectangle of this rectangle and `other`."""
         # ul=upper left lr=lower right
         ulcorner1, lrcorner1 = self.corners()
         ulcorner2, lrcorner2 = other.corners()
@@ -80,8 +80,7 @@ class PrefAccessInterface:
     """

     def set_default(self, key, value):
-        """Set the value ``value`` for ``key`` in the currently running app's preference store.
-        """
+        """Set the value ``value`` for ``key`` in the currently running app's preference store."""


 class Columns(GUIObject):
@@ -140,33 +139,27 @@ class Columns(GUIObject):

     # --- Public
     def column_by_index(self, index):
-        """Return the :class:`Column` having the :attr:`~Column.logical_index` ``index``.
-        """
+        """Return the :class:`Column` having the :attr:`~Column.logical_index` ``index``."""
         return self.column_list[index]

     def column_by_name(self, name):
-        """Return the :class:`Column` having the :attr:`~Column.name` ``name``.
-        """
+        """Return the :class:`Column` having the :attr:`~Column.name` ``name``."""
         return self.coldata[name]

     def columns_count(self):
-        """Returns the number of columns in our set.
-        """
+        """Returns the number of columns in our set."""
         return len(self.column_list)

     def column_display(self, colname):
-        """Returns display name for column named ``colname``, or ``''`` if there's none.
-        """
+        """Returns display name for column named ``colname``, or ``''`` if there's none."""
         return self._get_colname_attr(colname, "display", "")

     def column_is_visible(self, colname):
-        """Returns visibility for column named ``colname``, or ``True`` if there's none.
-        """
+        """Returns visibility for column named ``colname``, or ``True`` if there's none."""
         return self._get_colname_attr(colname, "visible", True)

     def column_width(self, colname):
-        """Returns width for column named ``colname``, or ``0`` if there's none.
-        """
+        """Returns width for column named ``colname``, or ``0`` if there's none."""
         return self._get_colname_attr(colname, "width", 0)

     def columns_to_right(self, colname):
@@ -177,11 +170,7 @@ class Columns(GUIObject):
         """
         column = self.coldata[colname]
         index = column.ordered_index
-        return [
-            col.name
-            for col in self.column_list
-            if (col.visible and col.ordered_index > index)
-        ]
+        return [col.name for col in self.column_list if (col.visible and col.ordered_index > index)]

     def menu_items(self):
         """Returns a list of items convenient for quick visibility menu generation.
@@ -207,8 +196,7 @@ class Columns(GUIObject):
         self.set_column_order(colnames)

     def reset_to_defaults(self):
-        """Reset all columns' width and visibility to their default values.
-        """
+        """Reset all columns' width and visibility to their default values."""
         self.set_column_order([col.name for col in self.column_list])
         for col in self._optional_columns():
             col.visible = col.default_visible
@@ -216,13 +204,11 @@ class Columns(GUIObject):
         self.view.restore_columns()

     def resize_column(self, colname, newwidth):
-        """Set column ``colname``'s width to ``newwidth``.
-        """
+        """Set column ``colname``'s width to ``newwidth``."""
         self._set_colname_attr(colname, "width", newwidth)

     def restore_columns(self):
-        """Restore's column persistent attributes from the last :meth:`save_columns`.
-        """
+        """Restore's column persistent attributes from the last :meth:`save_columns`."""
         if not (self.prefaccess and self.savename and self.coldata):
             if (not self.savename) and (self.coldata):
                 # This is a table that will not have its coldata saved/restored. we should
@@ -241,8 +227,7 @@ class Columns(GUIObject):
         self.view.restore_columns()

     def save_columns(self):
-        """Save column attributes in persistent storage for restoration in :meth:`restore_columns`.
-        """
+        """Save column attributes in persistent storage for restoration in :meth:`restore_columns`."""
         if not (self.prefaccess and self.savename and self.coldata):
             return
         for col in self.column_list:
@@ -263,15 +248,13 @@ class Columns(GUIObject):
             col.ordered_index = i

     def set_column_visible(self, colname, visible):
-        """Set the visibility of column ``colname``.
-        """
+        """Set the visibility of column ``colname``."""
         self.table.save_edits()  # the table on the GUI side will stop editing when the columns change
         self._set_colname_attr(colname, "visible", visible)
         self.view.set_column_visible(colname, visible)

     def set_default_width(self, colname, width):
-        """Set the default width or column ``colname``.
-        """
+        """Set the default width or column ``colname``."""
         self._set_colname_attr(colname, "default_width", width)

     def toggle_menu_item(self, index):
@@ -289,14 +272,10 @@ class Columns(GUIObject):
     # --- Properties
     @property
     def ordered_columns(self):
-        """List of :class:`Column` in visible order.
-        """
-        return [
-            col for col in sorted(self.column_list, key=lambda col: col.ordered_index)
-        ]
+        """List of :class:`Column` in visible order."""
+        return [col for col in sorted(self.column_list, key=lambda col: col.ordered_index)]

     @property
     def colnames(self):
-        """List of column names in visible order.
-        """
+        """List of column names in visible order."""
         return [col.name for col in self.ordered_columns]
@@ -21,12 +21,10 @@ class ProgressWindowView:
     """

     def show(self):
-        """Show the dialog.
-        """
+        """Show the dialog."""

     def close(self):
-        """Close the dialog.
-        """
+        """Close the dialog."""

     def set_progress(self, progress):
         """Set the progress of the progress bar to ``progress``.
@@ -76,8 +74,7 @@ class ProgressWindow(GUIObject, ThreadedJobPerformer):
         self.jobid = None

     def cancel(self):
-        """Call for a user-initiated job cancellation.
-        """
+        """Call for a user-initiated job cancellation."""
         # The UI is sometimes a bit buggy and calls cancel() on self.view.close(). We just want to
         # make sure that this doesn't lead us to think that the user acually cancelled the task, so
         # we verify that the job is still running.
@@ -27,9 +27,7 @@ class Selectable(Sequence):
             self._selected_indexes = []
         if not self._selected_indexes:
             return
-        self._selected_indexes = [
-            index for index in self._selected_indexes if index < len(self)
-        ]
+        self._selected_indexes = [index for index in self._selected_indexes if index < len(self)]
         if not self._selected_indexes:
             self._selected_indexes = [len(self) - 1]

@@ -97,8 +97,7 @@ class Table(MutableSequence, Selectable):
             self._rows.pop(0)
         if self._footer is not None:
             self._rows.pop()
-        key = lambda row: row.sort_key_for_column(column_name)
-        self._rows.sort(key=key, reverse=desc)
+        self._rows.sort(key=lambda row: row.sort_key_for_column(column_name), reverse=desc)
         if self._header is not None:
             self._rows.insert(0, self._header)
         if self._footer is not None:
@@ -277,8 +276,7 @@ class GUITable(Table, GUIObject):
         raise NotImplementedError()

     def _do_delete(self):
-        """(Virtual) Delete the selected rows.
-        """
+        """(Virtual) Delete the selected rows."""
         pass

     def _fill(self):
@@ -71,8 +71,7 @@ class TextField(GUIObject):

     # --- Public
     def refresh(self):
-        """Triggers a view :meth:`~TextFieldView.refresh`.
-        """
+        """Triggers a view :meth:`~TextFieldView.refresh`."""
         self.view.refresh()

     @property
@@ -55,8 +55,7 @@ class Node(MutableSequence):

     # --- Public
     def clear(self):
-        """Clears the node of all its children.
-        """
+        """Clears the node of all its children."""
         del self[:]

     def find(self, predicate, include_self=True):
@@ -103,14 +102,12 @@ class Node(MutableSequence):

     @property
     def children_count(self):
-        """Same as ``len(self)``.
-        """
+        """Same as ``len(self)``."""
         return len(self)

     @property
     def name(self):
-        """Name for the node, supplied on init.
-        """
+        """Name for the node, supplied on init."""
         return self._name

     @property
@@ -56,8 +56,7 @@ class Job:

     # ---Private
     def _subjob_callback(self, progress, desc=""):
-        """This is the callback passed to children jobs.
-        """
+        """This is the callback passed to children jobs."""
         self.set_progress(progress, desc)
         return True  # if JobCancelled has to be raised, it will be at the highest level

@@ -147,24 +146,29 @@ class Job:

 class NullJob:
     def __init__(self, *args, **kwargs):
+        # Null job does nothing
         pass

     def add_progress(self, *args, **kwargs):
+        # Null job does nothing
         pass

     def check_if_cancelled(self):
+        # Null job does nothing
         pass

     def iter_with_progress(self, sequence, *args, **kwargs):
         return iter(sequence)

     def start_job(self, *args, **kwargs):
+        # Null job does nothing
         pass

     def start_subjob(self, *args, **kwargs):
         return NullJob()

     def set_progress(self, *args, **kwargs):
+        # Null job does nothing
         pass


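Note: the comments added above make the intent explicit: NullJob is a null-object stand-in for Job, so code can report progress unconditionally and be handed a NullJob when no GUI is listening. A sketch (import path assumed from this repo's layout):

    from hscommon.jobprogress.job import NullJob

    def count_items(items, j):
        total = 0
        for item in j.iter_with_progress(items):  # a real Job would emit progress here
            total += 1
        return total

    print(count_items([1, 2, 3], NullJob()))  # 3, silently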
@@ -21,6 +21,8 @@ PO2COCOA = {

 COCOA2PO = {v: k for k, v in PO2COCOA.items()}

+STRING_EXT = ".strings"
+

 def get_langs(folder):
     return [name for name in os.listdir(folder) if op.isdir(op.join(folder, name))]
@@ -152,11 +154,9 @@ def strings2pot(target, dest):


 def allstrings2pot(lprojpath, dest, excludes=None):
-    allstrings = files_with_ext(lprojpath, ".strings")
+    allstrings = files_with_ext(lprojpath, STRING_EXT)
     if excludes:
-        allstrings = [
-            p for p in allstrings if op.splitext(op.basename(p))[0] not in excludes
-        ]
+        allstrings = [p for p in allstrings if op.splitext(op.basename(p))[0] not in excludes]
     for strings_path in allstrings:
         strings2pot(strings_path, dest)

@@ -195,11 +195,7 @@ def generate_cocoa_strings_from_code(code_folder, dest_folder):
     # genstrings produces utf-16 files with comments. After having generated the files, we convert
     # them to utf-8 and remove the comments.
     ensure_empty_folder(dest_folder)
-    print_and_do(
-        'genstrings -o "{}" `find "{}" -name *.m | xargs`'.format(
-            dest_folder, code_folder
-        )
-    )
+    print_and_do('genstrings -o "{}" `find "{}" -name *.m | xargs`'.format(dest_folder, code_folder))
     for stringsfile in os.listdir(dest_folder):
         stringspath = op.join(dest_folder, stringsfile)
         with open(stringspath, "rt", encoding="utf-16") as fp:
@@ -214,11 +210,9 @@ def generate_cocoa_strings_from_code(code_folder, dest_folder):


 def generate_cocoa_strings_from_xib(xib_folder):
-    xibs = [
-        op.join(xib_folder, fn) for fn in os.listdir(xib_folder) if fn.endswith(".xib")
-    ]
+    xibs = [op.join(xib_folder, fn) for fn in os.listdir(xib_folder) if fn.endswith(".xib")]
     for xib in xibs:
-        dest = xib.replace(".xib", ".strings")
+        dest = xib.replace(".xib", STRING_EXT)
         print_and_do("ibtool {} --generate-strings-file {}".format(xib, dest))
         print_and_do("iconv -f utf-16 -t utf-8 {0} | tee {0}".format(dest))

@@ -234,10 +228,6 @@ def localize_stringsfile(stringsfile, dest_root_folder):


 def localize_all_stringsfiles(src_folder, dest_root_folder):
-    stringsfiles = [
-        op.join(src_folder, fn)
-        for fn in os.listdir(src_folder)
-        if fn.endswith(".strings")
-    ]
+    stringsfiles = [op.join(src_folder, fn) for fn in os.listdir(src_folder) if fn.endswith(STRING_EXT)]
     for path in stringsfiles:
         localize_stringsfile(path, dest_root_folder)
@@ -16,8 +16,7 @@ from collections import defaultdict


 class Broadcaster:
-    """Broadcasts messages that are received by all listeners.
-    """
+    """Broadcasts messages that are received by all listeners."""

     def __init__(self):
         self.listeners = set()
@@ -39,8 +38,7 @@ class Broadcaster:


 class Listener:
-    """A listener is initialized with the broadcaster it's going to listen to. Initially, it is not connected.
-    """
+    """A listener is initialized with the broadcaster it's going to listen to. Initially, it is not connected."""

     def __init__(self, broadcaster):
         self.broadcaster = broadcaster
@@ -57,13 +55,11 @@ class Listener:
         self._bound_notifications[message].append(func)

     def connect(self):
-        """Connects the listener to its broadcaster.
-        """
+        """Connects the listener to its broadcaster."""
         self.broadcaster.add_listener(self)

     def disconnect(self):
-        """Disconnects the listener from its broadcaster.
-        """
+        """Disconnects the listener from its broadcaster."""
         self.broadcaster.remove_listener(self)

     def dispatch(self, msg):
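Note: Broadcaster.notify(msg) ends up invoking a same-named method on every connected listener via dispatch(); the HelloListener used in the tests later in this diff follows that convention. A minimal sketch (import path assumed):

    from hscommon.notify import Broadcaster, Listener

    class HelloListener(Listener):
        def __init__(self, broadcaster):
            Listener.__init__(self, broadcaster)
            self.hello_count = 0

        def hello(self):  # the method name is the message name
            self.hello_count += 1

    b = Broadcaster()
    listener = HelloListener(b)
    listener.connect()
    b.notify("hello")
    print(listener.hello_count)  # 1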
@@ -85,9 +85,7 @@ class Path(tuple):
     def __getitem__(self, key):
         if isinstance(key, slice):
             if isinstance(key.start, Path):
-                equal_elems = list(
-                    takewhile(lambda pair: pair[0] == pair[1], zip(self, key.start))
-                )
+                equal_elems = list(takewhile(lambda pair: pair[0] == pair[1], zip(self, key.start)))
                 key = slice(len(equal_elems), key.stop, key.step)
             if isinstance(key.stop, Path):
                 equal_elems = list(
@@ -226,9 +224,7 @@ def pathify(f):
     Calling ``foo('/bar', 0)`` will convert ``'/bar'`` to ``Path('/bar')``.
     """
     sig = signature(f)
-    pindexes = {
-        i for i, p in enumerate(sig.parameters.values()) if p.annotation is Path
-    }
+    pindexes = {i for i, p in enumerate(sig.parameters.values()) if p.annotation is Path}
     pkeys = {k: v for k, v in sig.parameters.items() if v.annotation is Path}

     def path_or_none(p):
@@ -236,9 +232,7 @@ def pathify(f):

     @wraps(f)
     def wrapped(*args, **kwargs):
-        args = tuple(
-            (path_or_none(a) if i in pindexes else a) for i, a in enumerate(args)
-        )
+        args = tuple((path_or_none(a) if i in pindexes else a) for i, a in enumerate(args))
         kwargs = {k: (path_or_none(v) if k in pkeys else v) for k, v in kwargs.items()}
         return f(*args, **kwargs)

@@ -246,8 +240,7 @@ def pathify(f):


 def log_io_error(func):
-    """ Catches OSError, IOError and WindowsError and log them
-    """
+    """Catches OSError, IOError and WindowsError and log them"""

     @wraps(func)
     def wrapper(path, *args, **kwargs):
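Note: @pathify converts any argument annotated with Path before the wrapped function runs, which is why conflict.py's _smart_move_or_copy (earlier in this diff) can be handed plain strings. A sketch (import path assumed):

    from hscommon.path import Path, pathify

    @pathify
    def show(path: Path):
        print(type(path).__name__, path)

    show("foo/bar")  # prints: Path foo/bar (the string was converted by the decorator)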
@@ -110,22 +110,14 @@ def _visit_pyfiles(list, dirname, names):
     # get extension for python source files
     if "_py_ext" not in globals():
         global _py_ext
-        _py_ext = [
-            triple[0] for triple in imp.get_suffixes() if triple[2] == imp.PY_SOURCE
-        ][0]
+        _py_ext = [triple[0] for triple in imp.get_suffixes() if triple[2] == imp.PY_SOURCE][0]

     # don't recurse into CVS directories
     if "CVS" in names:
         names.remove("CVS")

     # add all *.py files to list
-    list.extend(
-        [
-            os.path.join(dirname, file)
-            for file in names
-            if os.path.splitext(file)[1] == _py_ext
-        ]
-    )
+    list.extend([os.path.join(dirname, file) for file in names if os.path.splitext(file)[1] == _py_ext])


 def _get_modpkg_path(dotted_name, pathlist=None):
@@ -175,10 +167,10 @@ def getFilesForName(name):
     # check for glob chars
     if containsAny(name, "*?[]"):
         files = glob.glob(name)
-        list = []
+        file_list = []
         for file in files:
-            list.extend(getFilesForName(file))
-        return list
+            file_list.extend(getFilesForName(file))
+        return file_list

     # try to find module or package
     name = _get_modpkg_path(name)
@@ -187,9 +179,9 @@ def getFilesForName(name):

     if os.path.isdir(name):
         # find all python files in directory
-        list = []
-        os.walk(name, _visit_pyfiles, list)
-        return list
+        file_list = []
+        os.walk(name, _visit_pyfiles, file_list)
+        return file_list
     elif os.path.exists(name):
         # a single file
         return [name]
@@ -406,8 +398,7 @@ def main(source_files, outpath, keywords=None):
                 eater(*_token)
         except tokenize.TokenError as e:
             print(
-                "%s: %s, line %d, column %d"
-                % (e.args[0], filename, e.args[1][0], e.args[1][1]),
+                "%s: %s, line %d, column %d" % (e.args[0], filename, e.args[1][0], e.args[1][1]),
                 file=sys.stderr,
             )
         finally:
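Note: the list to file_list rename above is more than style: the old local shadowed the built-in list type inside getFilesForName(), which breaks the moment the same scope also needs the builtin. Illustration:

    def shadowed():
        list = []            # shadows the builtin in this scope
        return list((1, 2))  # TypeError: 'list' object is not callable

    def renamed():
        file_list = []       # the builtin stays reachable
        return list((1, 2))  # [1, 2]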
@@ -4,13 +4,11 @@
 # which should be included with this package. The terms are also available at
 # http://www.gnu.org/licenses/gpl-3.0.html

-import os.path as op
+from pathlib import Path
 import re

-from distutils.version import LooseVersion
-from pkg_resources import load_entry_point, get_distribution

 from .build import read_changelog_file, filereplace
+from sphinx.cmd.build import build_main as sphinx_build

 CHANGELOG_FORMAT = """
 {version} ({date})
@@ -24,9 +22,7 @@ def tixgen(tixurl):
     """This is a filter *generator*. tixurl is a url pattern for the tix with a {0} placeholder
     for the tix #
     """
-    urlpattern = tixurl.format(
-        "\\1"
-    )  # will be replaced buy the content of the first group in re
+    urlpattern = tixurl.format("\\1")  # will be replaced buy the content of the first group in re
     R = re.compile(r"#(\d+)")
     repl = "`#\\1 <{}>`__".format(urlpattern)
     return lambda text: R.sub(repl, text)
@@ -52,9 +48,9 @@ def gen(
     if confrepl is None:
         confrepl = {}
     if confpath is None:
-        confpath = op.join(basepath, "conf.tmpl")
+        confpath = Path(basepath, "conf.tmpl")
     if changelogtmpl is None:
-        changelogtmpl = op.join(basepath, "changelog.tmpl")
+        changelogtmpl = Path(basepath, "changelog.tmpl")
     changelog = read_changelog_file(changelogpath)
     tix = tixgen(tixurl)
     rendered_logs = []
@@ -63,36 +59,16 @@ def gen(
     # The format of the changelog descriptions is in markdown, but since we only use bulled list
     # and links, it's not worth depending on the markdown package. A simple regexp suffice.
         description = re.sub(r"\[(.*?)\]\((.*?)\)", "`\\1 <\\2>`__", description)
-        rendered = CHANGELOG_FORMAT.format(
-            version=log["version"], date=log["date_str"], description=description
-        )
+        rendered = CHANGELOG_FORMAT.format(version=log["version"], date=log["date_str"], description=description)
         rendered_logs.append(rendered)
     confrepl["version"] = changelog[0]["version"]
-    changelog_out = op.join(basepath, "changelog.rst")
+    changelog_out = Path(basepath, "changelog.rst")
     filereplace(changelogtmpl, changelog_out, changelog="\n".join(rendered_logs))
-    if op.exists(confpath):
-        conf_out = op.join(basepath, "conf.py")
+    if Path(confpath).exists():
+        conf_out = Path(basepath, "conf.py")
         filereplace(confpath, conf_out, **confrepl)
-    if LooseVersion(get_distribution("sphinx").version) >= LooseVersion("1.7.0"):
-        from sphinx.cmd.build import build_main as sphinx_build

     # Call the sphinx_build function, which is the same as doing sphinx-build from cli
     try:
-        sphinx_build([basepath, destpath])
+        sphinx_build([str(basepath), str(destpath)])
     except SystemExit:
-        print(
-            "Sphinx called sys.exit(), but we're cancelling it because we don't actually want to exit"
-        )
+        print("Sphinx called sys.exit(), but we're cancelling it because we don't actually want to exit")
-    else:
-        # We used to call sphinx-build with print_and_do(), but the problem was that the virtualenv
-        # of the calling python wasn't correctly considered and caused problems with documentation
-        # relying on autodoc (which tries to import the module to auto-document, but fail because of
-        # missing dependencies which are in the virtualenv). Here, we do exactly what is done when
-        # calling the command from bash.
-        cmd = load_entry_point("Sphinx", "console_scripts", "sphinx-build")
-        try:
-            cmd(["sphinx-build", basepath, destpath])
-        except SystemExit:
-            print(
-                "Sphinx called sys.exit(), but we're cancelling it because we don't actually want to exit"
-            )
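Note: dropping the LooseVersion gate makes Sphinx >= 1.7 a hard requirement, since sphinx.cmd.build.build_main only exists from 1.7 on; build_main takes the same argv as the sphinx-build CLI, minus the program name. Sketch (directories hypothetical):

    from sphinx.cmd.build import build_main

    # Equivalent to: sphinx-build docs build/docs
    build_main(["docs", "build/docs"])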
@@ -31,7 +31,7 @@ class FakeCursor(list):


 class _ActualThread(threading.Thread):
-    """ We can't use this class directly because thread object are not automatically freed when
+    """We can't use this class directly because thread object are not automatically freed when
     nothing refers to it, making it hang the application if not explicitely closed.
     """

@@ -19,7 +19,7 @@ from ..path import Path
 from ..testutil import eq_


-class TestCase_GetConflictedName:
+class TestCaseGetConflictedName:
     def test_simple(self):
         name = get_conflicted_name(["bar"], "bar")
         eq_("[000] bar", name)
@@ -46,7 +46,7 @@ class TestCase_GetConflictedName:
         eq_("[000] bar", name)


-class TestCase_GetUnconflictedName:
+class TestCaseGetUnconflictedName:
     def test_main(self):
         eq_("foobar", get_unconflicted_name("[000] foobar"))
         eq_("foobar", get_unconflicted_name("[9999] foobar"))
@@ -56,7 +56,7 @@ class TestCase_GetUnconflictedName:
         eq_("foo [000] bar", get_unconflicted_name("foo [000] bar"))


-class TestCase_IsConflicted:
+class TestCaseIsConflicted:
     def test_main(self):
         assert is_conflicted("[000] foobar")
         assert is_conflicted("[9999] foobar")
@@ -66,7 +66,7 @@ class TestCase_IsConflicted:
         assert not is_conflicted("foo [000] bar")


-class TestCase_move_copy:
+class TestCaseMoveCopy:
     @pytest.fixture
     def do_setup(self, request):
         tmpdir = request.getfixturevalue("tmpdir")
@@ -80,9 +80,7 @@ class TestCase_move_copy:
         assert self.path["baz"].exists()
         assert not self.path["foo"].exists()

-    def test_copy_no_conflict(
-        self, do_setup
-    ):  # No need to duplicate the rest of the tests... Let's just test on move
+    def test_copy_no_conflict(self, do_setup):  # No need to duplicate the rest of the tests... Let's just test on move
         smart_copy(self.path + "foo", self.path + "baz")
         assert self.path["baz"].exists()
         assert self.path["foo"].exists()
@@ -128,9 +128,7 @@ def test_repeater_with_repeated_notifications():
     r.connect()
     listener.connect()
     b.notify("hello")
-    b.notify(
-        "foo"
-    )  # if the repeater repeated this notif, we'd get a crash on HelloListener
+    b.notify("foo")  # if the repeater repeated this notif, we'd get a crash on HelloListener
     eq_(r.hello_count, 1)
     eq_(listener.hello_count, 1)
     eq_(r.foo_count, 1)
@@ -51,7 +51,7 @@ def test_init_with_tuple_and_list(force_ossep):

 def test_init_with_invalid_value(force_ossep):
     try:
-        path = Path(42)  # noqa: F841
+        Path(42)
         assert False
     except TypeError:
         pass
@@ -87,8 +87,7 @@ def test_filename(force_ossep):


 def test_deal_with_empty_components(force_ossep):
-    """Keep ONLY a leading space, which means we want a leading slash.
-    """
+    """Keep ONLY a leading space, which means we want a leading slash."""
     eq_("foo//bar", str(Path(("foo", "", "bar"))))
     eq_("/foo/bar", str(Path(("", "foo", "bar"))))
     eq_("foo/bar", str(Path("foo/bar/")))
@@ -143,8 +142,6 @@ def test_path_slice(force_ossep):
     eq_((), foobar[:foobar])
     abcd = Path("a/b/c/d")
     a = Path("a")
-    b = Path("b")  # noqa: #F841
-    c = Path("c")  # noqa: #F841
     d = Path("d")
     z = Path("z")
     eq_("b/c", abcd[a:d])
@@ -154,8 +151,7 @@


 def test_add_with_root_path(force_ossep):
-    """if I perform /a/b/c + /d/e/f, I want /a/b/c/d/e/f, not /a/b/c//d/e/f
-    """
+    """if I perform /a/b/c + /d/e/f, I want /a/b/c/d/e/f, not /a/b/c//d/e/f"""
     eq_("/foo/bar", str(Path("/foo") + Path("/bar")))


@@ -166,8 +162,7 @@ def test_create_with_tuple_that_have_slash_inside(force_ossep, monkeypatch):


 def test_auto_decode_os_sep(force_ossep, monkeypatch):
-    """Path should decode any either / or os.sep, but always encode in os.sep.
-    """
+    """Path should decode any either / or os.sep, but always encode in os.sep."""
     eq_(("foo\\bar", "bleh"), Path("foo\\bar/bleh"))
     monkeypatch.setattr(os, "sep", "\\")
     eq_(("foo", "bar/bleh"), Path("foo\\bar/bleh"))
@@ -219,7 +214,7 @@ def test_str_repr_of_mix_between_non_ascii_str_and_unicode(force_ossep):
     eq_("foo\u00e9/bar".encode(sys.getfilesystemencoding()), p.tobytes())


-def test_Path_of_a_Path_returns_self(force_ossep):
+def test_path_of_a_path_returns_self(force_ossep):
     # if Path() is called with a path as value, just return value.
     p = Path("foo/bar")
     assert Path(p) is p
@@ -44,9 +44,7 @@ def test_guicalls():
     # A GUISelectableList appropriately calls its view.
     sl = GUISelectableList(["foo", "bar"])
     sl.view = CallLogger()
-    sl.view.check_gui_calls(
-        ["refresh"]
-    )  # Upon setting the view, we get a call to refresh()
+    sl.view.check_gui_calls(["refresh"])  # Upon setting the view, we get a call to refresh()
     sl[1] = "baz"
     sl.view.check_gui_calls(["refresh"])
     sl.append("foo")
@@ -91,7 +91,7 @@ def test_make_sure_theres_no_messup_between_queries():
     threads = []
     for i in range(1, 101):
         t = threading.Thread(target=run, args=(i,))
-        t.start
+        t.start()
         threads.append(t)
     while threads:
         time.sleep(0.1)
|
|||||||
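Editor's note on the `-t.start` / `+t.start()` hunk above: it fixes a silent no-op. In Python, naming a bound method without calling it is just an attribute lookup, so the worker threads were never actually started. A minimal standalone sketch of the difference:

    import threading

    def work():
        print("ran")

    t = threading.Thread(target=work)
    t.start    # attribute access only: the bound method is looked up and discarded, nothing runs
    t.start()  # the fixed form actually starts the thread
    t.join()
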
@@ -19,6 +19,7 @@ class TestRow(Row):
self._index = index

def load(self):
+# Does nothing for test
pass

def save(self):

@@ -75,14 +76,17 @@ def test_allow_edit_when_attr_is_property_with_fset():
class TestRow(Row):
@property
def foo(self):
+# property only for existence checks
pass

@property
def bar(self):
+# property only for existence checks
pass

@bar.setter
def bar(self, value):
+# setter only for existence checks
pass

row = TestRow(Table())

@@ -97,10 +101,12 @@ def test_can_edit_prop_has_priority_over_fset_checks():
class TestRow(Row):
@property
def bar(self):
+# property only for existence checks
pass

@bar.setter
def bar(self, value):
+# setter only for existence checks
pass

can_edit_bar = False

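These tests pin down how row editability is decided: an attribute is editable when it is a property with a setter (`fset`), and an explicit `can_edit_<attr>` flag takes priority over that introspection. A hypothetical standalone sketch of the rule they exercise (the real logic lives in the `Row` class under test, not in this snippet):

    # Hypothetical sketch of the editability check asserted by the tests above.
    def can_edit_cell(row, attrname):
        # An explicit can_edit_<attr> flag wins over property introspection.
        override = getattr(row, "can_edit_" + attrname, None)
        if override is not None:
            return override
        prop = getattr(type(row), attrname, None)
        # Otherwise, editable only if the attribute is a property with a setter.
        return isinstance(prop, property) and prop.fset is not None
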
@@ -105,9 +105,7 @@ def test_findall_dont_include_self():
# When calling findall with include_self=False, the node itself is never evaluated.
t = tree_with_some_nodes()
del t._name # so that if the predicate is called on `t`, we crash
-r = t.findall(
-lambda n: not n.name.startswith("sub"), include_self=False
-) # no crash
+r = t.findall(lambda n: not n.name.startswith("sub"), include_self=False) # no crash
eq_(set(r), set([t[0], t[1], t[2]]))

@@ -105,9 +105,7 @@ def test_iterconsume():
# We just want to make sure that we return *all* items and that we're not mistakenly skipping
# one.
eq_(list(range(2500)), list(iterconsume(list(range(2500)))))
-eq_(
-list(reversed(range(2500))), list(iterconsume(list(range(2500)), reverse=False))
-)
+eq_(list(reversed(range(2500))), list(iterconsume(list(range(2500)), reverse=False)))

# --- String

@@ -238,49 +236,8 @@ def test_multi_replace():

# --- Files

-# These test cases needed https://github.com/hsoft/pytest-monkeyplus/ which appears to not be compatible with latest
-# pytest, looking at where this is used only appears to be in hscommon.localize_all_stringfiles at top level.
-# Right now this repo does not seem to utilize any of that functionality so going to leave these tests out for now.
-# TODO decide if fixing these tests is worth it or not.
-
-# class TestCase_modified_after:
-# def test_first_is_modified_after(self, monkeyplus):
-# monkeyplus.patch_osstat("first", st_mtime=42)
-# monkeyplus.patch_osstat("second", st_mtime=41)
-# assert modified_after("first", "second")
-
-# def test_second_is_modified_after(self, monkeyplus):
-# monkeyplus.patch_osstat("first", st_mtime=42)
-# monkeyplus.patch_osstat("second", st_mtime=43)
-# assert not modified_after("first", "second")
-
-# def test_same_mtime(self, monkeyplus):
-# monkeyplus.patch_osstat("first", st_mtime=42)
-# monkeyplus.patch_osstat("second", st_mtime=42)
-# assert not modified_after("first", "second")
-
-# def test_first_file_does_not_exist(self, monkeyplus):
-# # when the first file doesn't exist, we return False
-# monkeyplus.patch_osstat("second", st_mtime=42)
-# assert not modified_after("does_not_exist", "second") # no crash
-
-# def test_second_file_does_not_exist(self, monkeyplus):
-# # when the second file doesn't exist, we return True
-# monkeyplus.patch_osstat("first", st_mtime=42)
-# assert modified_after("first", "does_not_exist") # no crash
-
-# def test_first_file_is_none(self, monkeyplus):
-# # when the first file is None, we return False
-# monkeyplus.patch_osstat("second", st_mtime=42)
-# assert not modified_after(None, "second") # no crash
-
-# def test_second_file_is_none(self, monkeyplus):
-# # when the second file is None, we return True
-# monkeyplus.patch_osstat("first", st_mtime=42)
-# assert modified_after("first", None) # no crash
-
-class TestCase_delete_if_empty:
+class TestCaseDeleteIfEmpty:
def test_is_empty(self, tmpdir):
testpath = Path(str(tmpdir))
assert delete_if_empty(testpath)

@@ -332,9 +289,11 @@ class TestCase_delete_if_empty:
delete_if_empty(Path(str(tmpdir))) # no crash

-class TestCase_open_if_filename:
+class TestCaseOpenIfFilename:
+FILE_NAME = "test.txt"
+
def test_file_name(self, tmpdir):
-filepath = str(tmpdir.join("test.txt"))
+filepath = str(tmpdir.join(self.FILE_NAME))
open(filepath, "wb").write(b"test_data")
file, close = open_if_filename(filepath)
assert close

@@ -350,16 +309,18 @@ class TestCase_open_if_filename:
eq_("test_data", file.read())

def test_mode_is_passed_to_open(self, tmpdir):
-filepath = str(tmpdir.join("test.txt"))
+filepath = str(tmpdir.join(self.FILE_NAME))
open(filepath, "w").close()
file, close = open_if_filename(filepath, "a")
eq_("a", file.mode)
file.close()

-class TestCase_FileOrPath:
+class TestCaseFileOrPath:
+FILE_NAME = "test.txt"
+
def test_path(self, tmpdir):
-filepath = str(tmpdir.join("test.txt"))
+filepath = str(tmpdir.join(self.FILE_NAME))
open(filepath, "wb").write(b"test_data")
with FileOrPath(filepath) as fp:
eq_(b"test_data", fp.read())

@@ -372,7 +333,7 @@ class TestCase_FileOrPath:
eq_("test_data", fp.read())

def test_mode_is_passed_to_open(self, tmpdir):
-filepath = str(tmpdir.join("test.txt"))
+filepath = str(tmpdir.join(self.FILE_NAME))
open(filepath, "w").close()
with FileOrPath(filepath, "a") as fp:
eq_("a", fp.mode)

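As background for these assertions: `open_if_filename` returns a `(file, should_close)` pair; given a path it opens the file itself and reports that the caller should close it, while an already-open file object is passed through. A plausible sketch consistent with the behaviour tested above (a sketch only; the canonical implementation lives in hscommon and also handles `Path` arguments):

    # Sketch mirroring what the tests above assert, not the exact hscommon source.
    def open_if_filename(infile, mode="rb"):
        if isinstance(infile, str):
            # Path given: open it ourselves, caller is responsible for closing.
            return open(infile, mode), True
        # Already a file-like object: caller keeps ownership, nothing to close here.
        return infile, False
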
@@ -86,9 +86,7 @@ class CallLogger:
eq_(set(self.calls), set(expected))
self.clear_calls()

-def check_gui_calls_partial(
-self, expected=None, not_expected=None, verify_order=False
-):
+def check_gui_calls_partial(self, expected=None, not_expected=None, verify_order=False):
"""Checks that the expected calls have been made to 'self', then clears the log.

`expected` is an iterable of strings representing method names. Order doesn't matter.

@@ -99,25 +97,17 @@ class CallLogger:
__tracebackhide__ = True
if expected is not None:
not_called = set(expected) - set(self.calls)
-assert not not_called, "These calls haven't been made: {0}".format(
-not_called
-)
+assert not not_called, "These calls haven't been made: {0}".format(not_called)
if verify_order:
max_index = 0
for call in expected:
index = self.calls.index(call)
if index < max_index:
-raise AssertionError(
-"The call {0} hasn't been made in the correct order".format(
-call
-)
-)
+raise AssertionError("The call {0} hasn't been made in the correct order".format(call))
max_index = index
if not_expected is not None:
called = set(not_expected) & set(self.calls)
-assert not called, "These calls shouldn't have been made: {0}".format(
-called
-)
+assert not called, "These calls shouldn't have been made: {0}".format(called)
self.clear_calls()

@@ -193,7 +183,7 @@ def jointhreads():

def _unify_args(func, args, kwargs, args_to_ignore=None):
-""" Unify args and kwargs in the same dictionary.
+"""Unify args and kwargs in the same dictionary.

The result is kwargs with args added to it. func.func_code.co_varnames is used to determine
under what key each elements of arg will be mapped in kwargs.

@@ -211,9 +201,7 @@ def _unify_args(func, args, kwargs, args_to_ignore=None):
result = kwargs.copy()
if hasattr(func, "__code__"): # built-in functions don't have func_code
args = list(args)
-if (
-getattr(func, "__self__", None) is not None
-): # bound method, we have to add self to args list
+if getattr(func, "__self__", None) is not None: # bound method, we have to add self to args list
args = [func.__self__] + args
defaults = list(func.__defaults__) if func.__defaults__ is not None else []
arg_count = func.__code__.co_argcount

@@ -234,7 +222,7 @@ def _unify_args(func, args, kwargs, args_to_ignore=None):

def log_calls(func):
-""" Logs all func calls' arguments under func.calls.
+"""Logs all func calls' arguments under func.calls.

func.calls is a list of _unify_args() result (dict).

@@ -242,8 +230,8 @@ def log_calls(func):
"""

def wrapper(*args, **kwargs):
-unifiedArgs = _unify_args(func, args, kwargs)
-wrapper.calls.append(unifiedArgs)
+unified_args = _unify_args(func, args, kwargs)
+wrapper.calls.append(unified_args)
return func(*args, **kwargs)

wrapper.calls = []

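For readers unfamiliar with this helper: `log_calls` wraps a function so that each invocation's arguments, normalized by `_unify_args`, are appended to `wrapper.calls`. A minimal usage sketch (the `hscommon.testutil` import path is assumed from context, and the expected output follows `_unify_args`' docstring rather than a verified run):

    from hscommon.testutil import log_calls  # assumed import path for the helper above

    @log_calls
    def add(a, b):
        return a + b

    add(2, 3)
    add(a=4, b=5)
    # Positional and keyword calls are unified into one dict shape per call:
    print(add.calls)  # expected: [{'a': 2, 'b': 3}, {'a': 4, 'b': 5}]
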
@@ -13,7 +13,7 @@ import locale
import logging
import os.path as op

-from .plat import ISWINDOWS, ISLINUX
+from .plat import ISLINUX

_trfunc = None
_trget = None

@@ -46,36 +46,23 @@ def set_tr(new_tr, new_trget=None):

def get_locale_name(lang):
-if ISWINDOWS:
-# http://msdn.microsoft.com/en-us/library/39cwe7zf(vs.71).aspx
-LANG2LOCALENAME = {
-"cs": "czy",
-"de": "deu",
-"el": "grc",
-"es": "esn",
-"fr": "fra",
-"it": "ita",
-"ko": "korean",
-"nl": "nld",
-"pl_PL": "polish_poland",
-"pt_BR": "ptb",
-"ru": "rus",
-"zh_CN": "chs",
-}
-else:
+# Removed old conversion code as windows seems to support these
LANG2LOCALENAME = {
"cs": "cs_CZ",
"de": "de_DE",
"el": "el_GR",
+"en": "en",
"es": "es_ES",
"fr": "fr_FR",
-"it": "it_IT",
-"nl": "nl_NL",
"hy": "hy_AM",
+"it": "it_IT",
+"ja": "ja_JP",
"ko": "ko_KR",
+"nl": "nl_NL",
"pl_PL": "pl_PL",
"pt_BR": "pt_BR",
"ru": "ru_RU",
+"tr": "tr_TR",
"uk": "uk_UA",
"vi": "vi_VN",
"zh_CN": "zh_CN",

@@ -123,9 +110,7 @@ def install_gettext_trans(base_folder, lang):
if not lang:
return lambda s: s
try:
-return gettext.translation(
-domain, localedir=base_folder, languages=[lang]
-).gettext
+return gettext.translation(domain, localedir=base_folder, languages=[lang]).gettext
except IOError:
return lambda s: s

@@ -146,11 +131,11 @@ def install_gettext_trans(base_folder, lang):

def install_gettext_trans_under_cocoa():
from cocoa import proxy

-resFolder = proxy.getResourcePath()
-baseFolder = op.join(resFolder, "locale")
-currentLang = proxy.systemLang()
-install_gettext_trans(baseFolder, currentLang)
-localename = get_locale_name(currentLang)
+res_folder = proxy.getResourcePath()
+base_folder = op.join(res_folder, "locale")
+current_lang = proxy.systemLang()
+install_gettext_trans(base_folder, current_lang)
+localename = get_locale_name(current_lang)
if localename is not None:
locale.setlocale(locale.LC_ALL, localename)

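The collapsed `gettext.translation(...)` call above is the standard way these catalogs get loaded: a compiled `.mo` (built from the `.po` files added later in this diff) is looked up under `locale/<lang>/LC_MESSAGES/<domain>.mo`. A minimal self-contained sketch, with the `"core"` domain and `"tr"` language taken from the files below:

    import gettext

    try:
        # Mirrors the hunk above: load locale/tr/LC_MESSAGES/core.mo if present.
        tr = gettext.translation("core", localedir="locale", languages=["tr"]).gettext
    except IOError:
        # Missing catalog: fall back to identity, exactly as install_gettext_trans does.
        tr = lambda s: s

    print(tr("No duplicates found."))
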
@@ -19,8 +19,7 @@ from .path import Path, pathify, log_io_error

def nonone(value, replace_value):
-"""Returns ``value`` if ``value`` is not ``None``. Returns ``replace_value`` otherwise.
-"""
+"""Returns ``value`` if ``value`` is not ``None``. Returns ``replace_value`` otherwise."""
if value is None:
return replace_value
else:

@@ -28,8 +27,7 @@ def nonone(value, replace_value):

def tryint(value, default=0):
-"""Tries to convert ``value`` to in ``int`` and returns ``default`` if it fails.
-"""
+"""Tries to convert ``value`` to in ``int`` and returns ``default`` if it fails."""
try:
return int(value)
except (TypeError, ValueError):

@@ -37,8 +35,7 @@ def tryint(value, default=0):

def minmax(value, min_value, max_value):
-"""Returns `value` or one of the min/max bounds if `value` is not between them.
-"""
+"""Returns `value` or one of the min/max bounds if `value` is not between them."""
return min(max(value, min_value), max_value)

@@ -75,8 +72,7 @@ def flatten(iterables, start_with=None):

def first(iterable):
-"""Returns the first item of ``iterable``.
-"""
+"""Returns the first item of ``iterable``."""
try:
return next(iter(iterable))
except StopIteration:

@@ -84,14 +80,12 @@ def first(iterable):

def stripfalse(seq):
-"""Returns a sequence with all false elements stripped out of seq.
-"""
+"""Returns a sequence with all false elements stripped out of seq."""
return [x for x in seq if x]

def extract(predicate, iterable):
-"""Separates the wheat from the shaft (`predicate` defines what's the wheat), and returns both.
-"""
+"""Separates the wheat from the shaft (`predicate` defines what's the wheat), and returns both."""
wheat = []
shaft = []
for item in iterable:

@@ -103,8 +97,7 @@ def extract(predicate, iterable):

def allsame(iterable):
-"""Returns whether all elements of 'iterable' are the same.
-"""
+"""Returns whether all elements of 'iterable' are the same."""
it = iter(iterable)
try:
first_item = next(it)

@@ -152,14 +145,12 @@ def iterconsume(seq, reverse=True):

def escape(s, to_escape, escape_with="\\"):
-"""Returns ``s`` with characters in ``to_escape`` all prepended with ``escape_with``.
-"""
+"""Returns ``s`` with characters in ``to_escape`` all prepended with ``escape_with``."""
return "".join((escape_with + c if c in to_escape else c) for c in s)

def get_file_ext(filename):
-"""Returns the lowercase extension part of filename, without the dot.
-"""
+"""Returns the lowercase extension part of filename, without the dot."""
pos = filename.rfind(".")
if pos > -1:
return filename[pos + 1 :].lower()

@@ -168,8 +159,7 @@ def get_file_ext(filename):

def rem_file_ext(filename):
-"""Returns the filename without extension.
-"""
+"""Returns the filename without extension."""
pos = filename.rfind(".")
if pos > -1:
return filename[:pos]

@@ -187,13 +177,13 @@ def pluralize(number, word, decimals=0, plural_word=None):
``plural_word``: If the plural rule for word is more complex than adding a 's', specify a plural
"""
number = round(number, decimals)
-format = "%%1.%df %%s" % decimals
+plural_format = "%%1.%df %%s" % decimals
if number > 1:
if plural_word is None:
word += "s"
else:
word = plural_word
-return format % (number, word)
+return plural_format % (number, word)

def format_time(seconds, with_hours=True):

@@ -217,8 +207,7 @@ def format_time(seconds, with_hours=True):

def format_time_decimal(seconds):
-"""Transforms seconds in a strings like '3.4 minutes'.
-"""
+"""Transforms seconds in a strings like '3.4 minutes'."""
minus = seconds < 0
if minus:
seconds *= -1

@@ -263,7 +252,7 @@ def format_size(size, decimal=0, forcepower=-1, showdesc=True):
div = SIZE_VALS[i - 1]
else:
div = 1
-format = "%%%d.%df" % (decimal, decimal)
+size_format = "%%%d.%df" % (decimal, decimal)
negative = size < 0
divided_size = (0.0 + abs(size)) / div
if decimal == 0:

@@ -272,7 +261,7 @@ def format_size(size, decimal=0, forcepower=-1, showdesc=True):
divided_size = ceil(divided_size * (10 ** decimal)) / (10 ** decimal)
if negative:
divided_size *= -1
-result = format % divided_size
+result = size_format % divided_size
if showdesc:
result += " " + SIZE_DESC[i]
return result

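Note the pattern in the `pluralize` and `format_size` hunks above: the local variable `format` is renamed (`plural_format`, `size_format`) because binding that name shadows Python's built-in `format()` for the rest of the function. A quick illustration of the hazard the rename removes:

    def describe(size):
        format = "%d bytes" % size  # shadows the builtin within this scope
        # format(3.14159, ".2f") here would raise TypeError: 'str' object is not callable
        return format

    def describe_fixed(size):
        size_format = "%d bytes"  # renamed local; the builtin format() stays usable
        return size_format % size, format(3.14159, ".2f")
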
@@ -303,7 +292,7 @@ def multi_replace(s, replace_from, replace_to=""):
the same length as ``replace_from``, it will be transformed into a list.
"""
if isinstance(replace_to, str) and (len(replace_from) != len(replace_to)):
-replace_to = [replace_to for r in replace_from]
+replace_to = [replace_to for _ in replace_from]
if len(replace_from) != len(replace_to):
raise ValueError("len(replace_from) must be equal to len(replace_to)")
replace = list(zip(replace_from, replace_to))

@@ -320,8 +309,7 @@ ONE_DAY = timedelta(1)

def iterdaterange(start, end):
-"""Yields every day between ``start`` and ``end``.
-"""
+"""Yields every day between ``start`` and ``end``."""
date = start
while date <= end:
yield date

@@ -365,8 +353,7 @@ def find_in_path(name, paths=None):
@log_io_error
@pathify
def delete_if_empty(path: Path, files_to_delete=[]):
-"""Deletes the directory at 'path' if it is empty or if it only contains files_to_delete.
-"""
+"""Deletes the directory at 'path' if it is empty or if it only contains files_to_delete."""
if not path.exists() or not path.isdir():
return
contents = path.listdir()

@@ -411,8 +398,7 @@ def ensure_file(path):

def delete_files_with_pattern(folder_path, pattern, recursive=True):
-"""Delete all files (or folders) in `folder_path` that match the glob `pattern`.
-"""
+"""Delete all files (or folders) in `folder_path` that match the glob `pattern`."""
to_delete = glob.glob(op.join(folder_path, pattern))
for fn in to_delete:
if op.isdir(fn):

@@ -36,95 +36,95 @@ msgstr ""
msgid "Sending to Trash"
msgstr ""

-#: core\app.py:308
+#: core\app.py:287
msgid "A previous action is still hanging in there. You can't start a new one yet. Wait a few seconds, then try again."
msgstr ""

-#: core\app.py:318
+#: core\app.py:297
msgid "No duplicates found."
msgstr ""

-#: core\app.py:333
+#: core\app.py:312
msgid "All marked files were copied successfully."
msgstr ""

-#: core\app.py:334
+#: core\app.py:313
msgid "All marked files were moved successfully."
msgstr ""

-#: core\app.py:335
+#: core\app.py:314
msgid "All marked files were successfully sent to Trash."
msgstr ""

-#: core\app.py:343
+#: core\app.py:320
msgid "Could not load file: {}"
msgstr ""

-#: core\app.py:399
+#: core\app.py:376
msgid "'{}' already is in the list."
msgstr ""

-#: core\app.py:401
+#: core\app.py:378
msgid "'{}' does not exist."
msgstr ""

-#: core\app.py:410
+#: core\app.py:386
msgid "All selected %d matches are going to be ignored in all subsequent scans. Continue?"
msgstr ""

-#: core\app.py:486
+#: core\app.py:460
msgid "Select a directory to copy marked files to"
msgstr ""

-#: core\app.py:487
+#: core\app.py:462
msgid "Select a directory to move marked files to"
msgstr ""

-#: core\app.py:527
+#: core\app.py:501
msgid "Select a destination for your exported CSV"
msgstr ""

-#: core\app.py:534 core\app.py:801 core\app.py:811
+#: core\app.py:507 core\app.py:761 core\app.py:771
msgid "Couldn't write to file: {}"
msgstr ""

-#: core\app.py:559
+#: core\app.py:530
msgid "You have no custom command set up. Set it up in your preferences."
msgstr ""

-#: core\app.py:727 core\app.py:740
+#: core\app.py:688 core\app.py:700
msgid "You are about to remove %d files from results. Continue?"
msgstr ""

-#: core\app.py:774
+#: core\app.py:736
msgid "{} duplicate groups were changed by the re-prioritization."
msgstr ""

-#: core\app.py:821
+#: core\app.py:780
msgid "The selected directories contain no scannable file."
msgstr ""

-#: core\app.py:835
+#: core\app.py:793
msgid "Collecting files to scan"
msgstr ""

-#: core\app.py:891
+#: core\app.py:840
msgid "%s (%d discarded)"
msgstr ""

-#: core\engine.py:244 core\engine.py:288
+#: core\engine.py:251 core\engine.py:294
msgid "0 matches found"
msgstr ""

-#: core\engine.py:262 core\engine.py:296
+#: core\engine.py:269 core\engine.py:306
msgid "%d matches found"
msgstr ""

-#: core\gui\deletion_options.py:73
+#: core\gui\deletion_options.py:71
msgid "You are sending {} file(s) to the Trash."
msgstr ""

-#: core\gui\exclude_list_table.py:15
+#: core\gui\exclude_list_table.py:14
msgid "Regular Expressions"
msgstr ""

@@ -156,15 +156,15 @@ msgstr ""
msgid "Analyzed %d/%d pictures"
msgstr ""

-#: core\pe\matchblock.py:181
+#: core\pe\matchblock.py:177
msgid "Performed %d/%d chunk matches"
msgstr ""

-#: core\pe\matchblock.py:191
+#: core\pe\matchblock.py:185
msgid "Preparing for matching"
msgstr ""

-#: core\pe\matchblock.py:244
+#: core\pe\matchblock.py:234
msgid "Verified %d/%d matches"
msgstr ""

@@ -212,11 +212,11 @@ msgstr ""
msgid "Oldest"
msgstr ""

-#: core\results.py:142
+#: core\results.py:134
msgid "%d / %d (%s / %s) duplicates marked."
msgstr ""

-#: core\results.py:149
+#: core\results.py:141
msgid " filter: %s"
msgstr ""

@@ -308,7 +308,7 @@ msgstr "Rimuovi le cartelle vuote dopo aver cancellato o spostato"
#: qt/me/preferences_dialog.py:60 qt/pe/preferences_dialog.py:27
#: qt/se/preferences_dialog.py:59 cocoa/en.lproj/Localizable.strings:0
msgid "Ignore duplicates hardlinking to the same file"
-msgstr "Non creare gli hardlink per i duplicati verso il medesimo file"
+msgstr "Non considerare gli hardlink come duplicati"

#: qt/me/preferences_dialog.py:62 qt/pe/preferences_dialog.py:29
#: qt/se/preferences_dialog.py:62 cocoa/en.lproj/Localizable.strings:0

@@ -1,9 +1,10 @@
# Translators:
# Andrew Senetar <arsenetar@gmail.com>, 2021
+# Bas <duvel3@gmail.com>, 2021
#
msgid ""
msgstr ""
-"Last-Translator: Andrew Senetar <arsenetar@gmail.com>, 2021\n"
+"Last-Translator: Bas <duvel3@gmail.com>, 2021\n"
"Language-Team: Dutch (https://www.transifex.com/voltaicideas/teams/116153/nl/)\n"
"Language: nl\n"
"Content-Type: text/plain; charset=UTF-8\n"

@@ -13,11 +14,11 @@ msgstr ""
#: core\gui\ignore_list_table.py:19 core\gui\ignore_list_table.py:20
#: core\gui\problem_table.py:18
msgid "File Path"
-msgstr "Bestand locatie"
+msgstr "Bestandspad"

#: core\gui\problem_table.py:19
msgid "Error Message"
-msgstr "Fout Melding"
+msgstr "Foutmelding"

#: core\me\prioritize.py:23
msgid "Duration"

@@ -39,11 +40,11 @@ msgstr "Bestandsnaam"
#: core\me\result_table.py:20 core\pe\result_table.py:20 core\prioritize.py:75
#: core\se\result_table.py:20
msgid "Folder"
-msgstr "Folder"
+msgstr "Map"

#: core\me\result_table.py:21
msgid "Size (MB)"
-msgstr "Grote (MB)"
+msgstr "Grootte (MB)"

#: core\me\result_table.py:22
msgid "Time"

@@ -56,7 +57,7 @@ msgstr "Sample Frequentie"
#: core\me\result_table.py:25 core\pe\result_table.py:22 core\prioritize.py:65
#: core\se\result_table.py:22
msgid "Kind"
-msgstr "Kind"
+msgstr "Soort"

#: core\me\result_table.py:26 core\pe\result_table.py:25
#: core\prioritize.py:163 core\se\result_table.py:23

@@ -111,7 +112,7 @@ msgstr "Afmetingen"

#: core\pe\result_table.py:21 core\se\result_table.py:21
msgid "Size (KB)"
-msgstr "Grote (KB)"
+msgstr "Grootte (KB)"

#: core\pe\result_table.py:24
msgid "EXIF Timestamp"

@@ -119,4 +120,4 @@ msgstr "EXIF Tijdstip"

#: core\prioritize.py:156
msgid "Size"
-msgstr "Grote"
+msgstr "Grootte"

@@ -1,10 +1,11 @@
# Translators:
# Andrew Senetar <arsenetar@gmail.com>, 2021
# Fuan <jcfrt@posteo.net>, 2021
+# Bas <duvel3@gmail.com>, 2021
#
msgid ""
msgstr ""
-"Last-Translator: Fuan <jcfrt@posteo.net>, 2021\n"
+"Last-Translator: Bas <duvel3@gmail.com>, 2021\n"
"Language-Team: Dutch (https://www.transifex.com/voltaicideas/teams/116153/nl/)\n"
"Language: nl\n"
"Content-Type: text/plain; charset=UTF-8\n"

@@ -42,7 +43,7 @@ msgstr "Verplaatsen"

#: core\app.py:74
msgid "Copying"
-msgstr "Kopieeren"
+msgstr "Kopiëren"

#: core\app.py:75
msgid "Sending to Trash"

@@ -78,7 +79,7 @@ msgstr "Kan bestand niet laden: {}"

#: core\app.py:399
msgid "'{}' already is in the list."
-msgstr "'{}'staat al in de lijst."
+msgstr "'{}' staat al in de lijst."

#: core\app.py:401
msgid "'{}' does not exist."

@@ -151,7 +152,7 @@ msgstr "%d overeenkomsten gevonden"

#: core\gui\deletion_options.py:73
msgid "You are sending {} file(s) to the Trash."
-msgstr "Je verplaatst {} bestanden naar de prullenbak"
+msgstr "Je verplaatst {} bestand(en) naar de prullenbak"

#: core\gui\exclude_list_table.py:15
msgid "Regular Expressions"

@@ -160,7 +161,7 @@ msgstr "Normale Uitdrukkingen"
#: core\gui\ignore_list_dialog.py:25
msgid "Do you really want to remove all %d items from the ignore list?"
msgstr ""
-"Weet je zeker dat je all %d regels uit de overslaan lijst wilt verwijderen?"
+"Weet je zeker dat je alle %d regels uit de overslaan lijst wilt verwijderen?"

#: core\me\scanner.py:20 core\se\scanner.py:16
msgid "Filename"

@@ -252,7 +253,7 @@ msgstr "filter: %s"

#: core\scanner.py:85
msgid "Read size of %d/%d files"
-msgstr "Bestands grote van %d/%d bestanden aan het lezen."
+msgstr "Bestandsgrootte van %d/%d bestanden aan het lezen."

#: core\scanner.py:109
msgid "Read metadata of %d/%d files"

@@ -1,10 +1,11 @@
# Translators:
# Andrew Senetar <arsenetar@gmail.com>, 2021
# Fuan <jcfrt@posteo.net>, 2021
+# Bas <duvel3@gmail.com>, 2021
#
msgid ""
msgstr ""
-"Last-Translator: Fuan <jcfrt@posteo.net>, 2021\n"
+"Last-Translator: Bas <duvel3@gmail.com>, 2021\n"
"Language-Team: Dutch (https://www.transifex.com/voltaicideas/teams/116153/nl/)\n"
"Language: nl\n"
"Content-Type: text/plain; charset=UTF-8\n"

@@ -47,7 +48,7 @@ msgstr "Weet je zeker dat je de afbeeldings-analyse cache wilt verwijderen"

#: qt/app.py:184
msgid "Picture cache cleared."
-msgstr "Afbeelding cache leeggemaakt"
+msgstr "Afbeelding cache leeggemaakt."

#: qt/app.py:251
msgid "{} file (*.{})"

@@ -55,7 +56,7 @@ msgstr "{} bestand (*.{})"

#: qt/deletion_options.py:30 cocoa/en.lproj/Localizable.strings:0
msgid "Deletion Options"
-msgstr "verwijder opties"
+msgstr "Verwijderopties"

#: qt/deletion_options.py:35 cocoa/en.lproj/Localizable.strings:0
msgid "Link deleted files"

@@ -125,7 +126,7 @@ msgstr "Resultaten venster"

#: qt/directories_dialog.py:66
msgid "Add Folder..."
-msgstr "Folder toevoegen"
+msgstr "Folder toevoegen..."

#: qt/directories_dialog.py:74 qt/result_window.py:100
#: cocoa/en.lproj/Localizable.strings:0

@@ -287,7 +288,7 @@ msgstr "Word gewicht"
#: qt/me/preferences_dialog.py:52 qt/se/preferences_dialog.py:32
#: cocoa/en.lproj/Localizable.strings:0
msgid "Match similar words"
-msgstr "vergelijk gelijkwaardige woorden"
+msgstr "Vergelijk gelijkwaardige woorden"

#: qt/me/preferences_dialog.py:54 qt/pe/preferences_dialog.py:21
#: qt/se/preferences_dialog.py:34 cocoa/en.lproj/Localizable.strings:0

@@ -307,7 +308,7 @@ msgstr "Verwijder lege folders tijdens weggooien of verplaatsen"
#: qt/me/preferences_dialog.py:60 qt/pe/preferences_dialog.py:27
#: qt/se/preferences_dialog.py:59 cocoa/en.lproj/Localizable.strings:0
msgid "Ignore duplicates hardlinking to the same file"
-msgstr "negeer dubbelingen die hard gelinkt zijn aan het zelfde bestand"
+msgstr "Negeer dubbelingen die hard gelinkt zijn aan het zelfde bestand"

#: qt/me/preferences_dialog.py:62 qt/pe/preferences_dialog.py:29
#: qt/se/preferences_dialog.py:62 cocoa/en.lproj/Localizable.strings:0

@@ -633,7 +634,7 @@ msgstr "Folder selectie venster"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Font Size:"
-msgstr "Font grote:"
+msgstr "Grootte lettertype:"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Hide dupeGuru"

122 locale/tr/LC_MESSAGES/columns.po Normal file
@@ -0,0 +1,122 @@
+# Translators:
+# Ahmet Haydar Işık <itsahmthydr@gmail.com>, 2021
+#
+msgid ""
+msgstr ""
+"Last-Translator: Ahmet Haydar Işık <itsahmthydr@gmail.com>, 2021\n"
+"Language-Team: Turkish (https://www.transifex.com/voltaicideas/teams/116153/tr/)\n"
+"Language: tr\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: utf-8\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"

+#: core\gui\ignore_list_table.py:19 core\gui\ignore_list_table.py:20
+#: core\gui\problem_table.py:18
+msgid "File Path"
+msgstr "Dosya Yolu"

+#: core\gui\problem_table.py:19
+msgid "Error Message"
+msgstr "Hata Mesajı"

+#: core\me\prioritize.py:23
+msgid "Duration"
+msgstr "Süre"

+#: core\me\prioritize.py:30 core\me\result_table.py:23
+msgid "Bitrate"
+msgstr "Bit hızı"

+#: core\me\prioritize.py:37
+msgid "Samplerate"
+msgstr "Örnekleme hızı"

+#: core\me\result_table.py:19 core\pe\result_table.py:19 core\prioritize.py:92
+#: core\se\result_table.py:19
+msgid "Filename"
+msgstr "Dosya adı"

+#: core\me\result_table.py:20 core\pe\result_table.py:20 core\prioritize.py:75
+#: core\se\result_table.py:20
+msgid "Folder"
+msgstr "Dizin"

+#: core\me\result_table.py:21
+msgid "Size (MB)"
+msgstr "Boyut (MB)"

+#: core\me\result_table.py:22
+msgid "Time"
+msgstr "Zaman"

+#: core\me\result_table.py:24
+msgid "Sample Rate"
+msgstr "Örnekleme Hızı"

+#: core\me\result_table.py:25 core\pe\result_table.py:22 core\prioritize.py:65
+#: core\se\result_table.py:22
+msgid "Kind"
+msgstr "Tür"

+#: core\me\result_table.py:26 core\pe\result_table.py:25
+#: core\prioritize.py:163 core\se\result_table.py:23
+msgid "Modification"
+msgstr "Düzenleme"

+#: core\me\result_table.py:27
+msgid "Title"
+msgstr "Başlık"

+#: core\me\result_table.py:28
+msgid "Artist"
+msgstr "Sanatçı"

+#: core\me\result_table.py:29
+msgid "Album"
+msgstr "Albüm"

+#: core\me\result_table.py:30
+msgid "Genre"
+msgstr "Tarz"

+#: core\me\result_table.py:31
+msgid "Year"
+msgstr "Yıl"

+#: core\me\result_table.py:32
+msgid "Track Number"
+msgstr "Parça Numarası"

+#: core\me\result_table.py:33
+msgid "Comment"
+msgstr "Yorum"

+#: core\me\result_table.py:34 core\pe\result_table.py:26
+#: core\se\result_table.py:24
+msgid "Match %"
+msgstr "Eşleşme oranı %"

+#: core\me\result_table.py:35 core\se\result_table.py:25
+msgid "Words Used"
+msgstr "Kullanılan Kelimeler"

+#: core\me\result_table.py:36 core\pe\result_table.py:27
+#: core\se\result_table.py:26
+msgid "Dupe Count"
+msgstr "Kopya Sayısı"

+#: core\pe\prioritize.py:23 core\pe\result_table.py:23
+msgid "Dimensions"
+msgstr "Boyutlar"

+#: core\pe\result_table.py:21 core\se\result_table.py:21
+msgid "Size (KB)"
+msgstr "Boyut (KB)"

+#: core\pe\result_table.py:24
+msgid "EXIF Timestamp"
+msgstr "EXIF Zaman damgası"

+#: core\prioritize.py:156
+msgid "Size"
+msgstr "Boyut"

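The `Plural-Forms: nplurals=2; plural=(n > 1);` header in these new Turkish catalogs is what drives plural selection at runtime: form 0 for n <= 1, form 1 otherwise. A small sketch of how a consumer would exercise it (hypothetical msgids; assumes the catalog has been compiled to a `.mo`, with `fallback=True` so the snippet also runs without one):

    import gettext

    trans = gettext.translation("ui", localedir="locale", languages=["tr"], fallback=True)
    # With nplurals=2; plural=(n > 1): n=1 picks the singular form, n=3 the plural one.
    print(trans.ngettext("%d file", "%d files", 1) % 1)
    print(trans.ngettext("%d file", "%d files", 3) % 3)
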
258
locale/tr/LC_MESSAGES/core.po
Normal file
258
locale/tr/LC_MESSAGES/core.po
Normal file
@@ -0,0 +1,258 @@
|
|||||||
|
# Translators:
|
||||||
|
# Ahmet Haydar Işık <itsahmthydr@gmail.com>, 2021
|
||||||
|
#
|
||||||
|
msgid ""
|
||||||
|
msgstr ""
|
||||||
|
"Last-Translator: Ahmet Haydar Işık <itsahmthydr@gmail.com>, 2021\n"
|
||||||
|
"Language-Team: Turkish (https://www.transifex.com/voltaicideas/teams/116153/tr/)\n"
|
||||||
|
"Language: tr\n"
|
||||||
|
"Content-Type: text/plain; charset=UTF-8\n"
|
||||||
|
"Content-Transfer-Encoding: utf-8\n"
|
||||||
|
"Plural-Forms: nplurals=2; plural=(n > 1);\n"
|
||||||
|
|
||||||
|
#: core\app.py:42
|
||||||
|
msgid "There are no marked duplicates. Nothing has been done."
|
||||||
|
msgstr "İşaretlenmiş kopya yok. Hiçbir işlem yapılmadı."
|
||||||
|
|
||||||
|
#: core\app.py:43
|
||||||
|
msgid "There are no selected duplicates. Nothing has been done."
|
||||||
|
msgstr "Seçilmiş kopya yok. Hiçbir işlem yapılmadı."
|
||||||
|
|
||||||
|
#: core\app.py:44
|
||||||
|
msgid ""
|
||||||
|
"You're about to open many files at once. Depending on what those files are "
|
||||||
|
"opened with, doing so can create quite a mess. Continue?"
|
||||||
|
msgstr ""
|
||||||
|
"Aynı anda birçok dosyayı açmak üzeresiniz. Bu dosyaların neyle açıldığına "
|
||||||
|
"bağlı olarak, bunu yapmak büyük karışıklık yaratabilir. Yine de devam "
|
||||||
|
"edilsin mi?"
|
||||||
|
|
||||||
|
#: core\app.py:71
|
||||||
|
msgid "Scanning for duplicates"
|
||||||
|
msgstr "Kopyalar için taranıyor"
|
||||||
|
|
||||||
|
#: core\app.py:72
|
||||||
|
msgid "Loading"
|
||||||
|
msgstr "Yükleniyor"
|
||||||
|
|
||||||
|
#: core\app.py:73
|
||||||
|
msgid "Moving"
|
||||||
|
msgstr "Taşınıyor"
|
||||||
|
|
||||||
|
#: core\app.py:74
|
||||||
|
msgid "Copying"
|
||||||
|
msgstr "Kopyalanıyor"
|
||||||
|
|
||||||
|
#: core\app.py:75
|
||||||
|
msgid "Sending to Trash"
|
||||||
|
msgstr "Geri Dönüşüm Kutusuna gönderiliyor"
|
||||||
|
|
||||||
|
#: core\app.py:308
|
||||||
|
msgid ""
|
||||||
|
"A previous action is still hanging in there. You can't start a new one yet. "
|
||||||
|
"Wait a few seconds, then try again."
|
||||||
|
msgstr ""
|
||||||
|
"Önceki eylem hala tamamlanmadı. Henüz yeni bir eylem başlatamazsınız. Birkaç"
|
||||||
|
" saniye bekleyin, ardından tekrar deneyin."
|
||||||
|
|
||||||
|
#: core\app.py:318
|
||||||
|
msgid "No duplicates found."
|
||||||
|
msgstr "Hiç kopya bulunamadı."
|
||||||
|
|
||||||
|
#: core\app.py:333
|
||||||
|
msgid "All marked files were copied successfully."
|
||||||
|
msgstr "İşaretlenmiş tüm dosyalar başarıyla kopyalandı."
|
||||||
|
|
||||||
|
#: core\app.py:334
|
||||||
|
msgid "All marked files were moved successfully."
|
||||||
|
msgstr "İşaretlenmiş tüm dosyalar başarıyla taşındı."
|
||||||
|
|
||||||
|
#: core\app.py:335
|
||||||
|
msgid "All marked files were successfully sent to Trash."
|
||||||
|
msgstr "İşaretlenmiş tüm dosyalar başarıyla Geri Dönüşüm Kutusuna gönderildi."
|
||||||
|
|
||||||
|
#: core\app.py:343
|
||||||
|
msgid "Could not load file: {}"
|
||||||
|
msgstr "Dosya yüklenemedi: {}"
|
||||||
|
|
||||||
|
#: core\app.py:399
|
||||||
|
msgid "'{}' already is in the list."
|
||||||
|
msgstr "'{}' zaten listede."
|
||||||
|
|
||||||
|
#: core\app.py:401
|
||||||
|
msgid "'{}' does not exist."
|
||||||
|
msgstr "'{}' mevcut değil."
|
||||||
|
|
||||||
|
#: core\app.py:410
|
||||||
|
msgid ""
|
||||||
|
"All selected %d matches are going to be ignored in all subsequent scans. "
|
||||||
|
"Continue?"
|
||||||
|
msgstr ""
|
||||||
|
"Tüm seçili %deşleşmeleri sonraki taramalarda yok sayılacaktır. Devam edilsin"
|
||||||
|
" mi?"
|
||||||
|
|
||||||
|
#: core\app.py:486
|
||||||
|
msgid "Select a directory to copy marked files to"
|
||||||
|
msgstr "İşaretlenmiş dosyaları kopyalamak için bir dizin seçin"
|
||||||
|
|
||||||
|
#: core\app.py:487
|
||||||
|
msgid "Select a directory to move marked files to"
|
||||||
|
msgstr "İşaretlenmiş dosyaları taşımak için bir dizin seçin"
|
||||||
|
|
||||||
|
#: core\app.py:527
|
||||||
|
msgid "Select a destination for your exported CSV"
|
||||||
|
msgstr "Dışa aktarılacak CSV dosyası için bir hedef seçin"
|
||||||
|
|
||||||
|
#: core\app.py:534 core\app.py:801 core\app.py:811
|
||||||
|
msgid "Couldn't write to file: {}"
|
||||||
|
msgstr "Dosyaya yazılamadı: {}"
|
||||||
|
|
||||||
|
#: core\app.py:559
|
||||||
|
msgid "You have no custom command set up. Set it up in your preferences."
|
||||||
|
msgstr "Özel bir komut ayarınız yok. Tercihlerinizden ayarlayabilirsiniz."
|
||||||
|
|
||||||
|
#: core\app.py:727 core\app.py:740
|
||||||
|
msgid "You are about to remove %d files from results. Continue?"
|
||||||
|
msgstr "Sonuçlardan%ddosyaları çıkarmak üzeresiniz. Devam edilsin mi?"
|
||||||
|
|
||||||
|
#: core\app.py:774
|
||||||
|
msgid "{} duplicate groups were changed by the re-prioritization."
|
||||||
|
msgstr "{} yinelenen gruplar, yeniden önceliklendirme ile değiştirildi."
|
||||||
|
|
||||||
|
#: core\app.py:821
|
||||||
|
msgid "The selected directories contain no scannable file."
|
||||||
|
msgstr "Seçili dizinler taranabilir dosya içermiyor."
|
||||||
|
|
||||||
|
#: core\app.py:835
|
||||||
|
msgid "Collecting files to scan"
|
||||||
|
msgstr "Taranacak dosyalar toplanıyor"
|
||||||
|
|
||||||
|
#: core\app.py:891
|
||||||
|
msgid "%s (%d discarded)"
|
||||||
|
msgstr "%s(%d atıldı)"
|
||||||
|
|
||||||
|
#: core\engine.py:244 core\engine.py:288
|
||||||
|
msgid "0 matches found"
|
||||||
|
msgstr "0 eşleşme bulundu"
|
#: core\engine.py:262 core\engine.py:296
msgid "%d matches found"
msgstr "%d eşleşme bulundu"

#: core\gui\deletion_options.py:73
msgid "You are sending {} file(s) to the Trash."
msgstr "{} dosyayı/dosyaları Geri Dönüşüm Kutusuna gönderiyorsunuz."

#: core\gui\exclude_list_table.py:15
msgid "Regular Expressions"
msgstr "Düzenli İfadeler"

#: core\gui\ignore_list_dialog.py:25
msgid "Do you really want to remove all %d items from the ignore list?"
msgstr ""
"Yok sayılanlar listesinden %d öğelerin tümünü çıkarmak istediğinize emin "
"misiniz?"

#: core\me\scanner.py:20 core\se\scanner.py:16
msgid "Filename"
msgstr "Dosya adı"

#: core\me\scanner.py:21
msgid "Filename - Fields"
msgstr "Dosya adı - Alanlar"

#: core\me\scanner.py:22
msgid "Filename - Fields (No Order)"
msgstr "Dosya Adı - Alanlar (Düzen Yok)"

#: core\me\scanner.py:23
msgid "Tags"
msgstr "Etiketler"

#: core\me\scanner.py:24 core\pe\scanner.py:21 core\se\scanner.py:17
msgid "Contents"
msgstr "İçindekiler"

#: core\pe\matchblock.py:72
msgid "Analyzed %d/%d pictures"
msgstr "%d/%d resim analiz edildi"

#: core\pe\matchblock.py:181
msgid "Performed %d/%d chunk matches"
msgstr "%d/%d öbek eşleştirme gerçekleştirildi"

#: core\pe\matchblock.py:191
msgid "Preparing for matching"
msgstr "Eşleştirmek için hazırlanılıyor"

#: core\pe\matchblock.py:244
msgid "Verified %d/%d matches"
msgstr "%d/%d eşleşmeler doğrulandı"

#: core\pe\matchexif.py:19
msgid "Read EXIF of %d/%d pictures"
msgstr "%d/%d resimlerin EXIF'i okunuyor"

#: core\pe\scanner.py:22
msgid "EXIF Timestamp"
msgstr "EXIF Zaman damgası"

#: core\prioritize.py:70
msgid "None"
msgstr "Hiçbiri"

#: core\prioritize.py:100
msgid "Ends with number"
msgstr "Sayıyla biter"

#: core\prioritize.py:101
msgid "Doesn't end with number"
msgstr "Sayıyla bitmez"

#: core\prioritize.py:102
msgid "Longest"
msgstr "En uzun"

#: core\prioritize.py:103
msgid "Shortest"
msgstr "En kısa"

#: core\prioritize.py:140
msgid "Highest"
msgstr "En yüksek"

#: core\prioritize.py:140
msgid "Lowest"
msgstr "En düşük"

#: core\prioritize.py:169
msgid "Newest"
msgstr "En yeni"

#: core\prioritize.py:169
msgid "Oldest"
msgstr "En eski"

#: core\results.py:142
msgid "%d / %d (%s / %s) duplicates marked."
msgstr "%d / %d (%s / %s) kopyalar işaretlendi."

#: core\results.py:149
msgid " filter: %s"
msgstr " filtrele: %s"

#: core\scanner.py:85
msgid "Read size of %d/%d files"
msgstr "%d/%d dosyaların boyutunu oku"

#: core\scanner.py:109
msgid "Read metadata of %d/%d files"
msgstr "%d/%d dosyaların üst verisini (metadata) oku"

#: core\scanner.py:147
msgid "Almost done! Fiddling with results..."
msgstr "Neredeyse bitti! Sonuçlarla uğraşılıyor..."

#: core\se\scanner.py:18
msgid "Folders"
msgstr "Dizinler"
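These entries pass printf-style placeholders straight through to the translated strings, so a msgstr has to keep `%d`/`%s` and the spacing around them intact; a quick Python illustration of the formatting behavior:

```python
# "%d" formats either way, but without a space the number fuses with the word:
print("%d eşleşme bulundu" % 3)  # -> "3 eşleşme bulundu"
print("%deşleşme bulundu" % 3)   # -> "3eşleşme bulundu"
```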
947
locale/tr/LC_MESSAGES/ui.po
Normal file
@@ -0,0 +1,947 @@
# Translators:
# Ahmet Haydar Işık <itsahmthydr@gmail.com>, 2021
#
msgid ""
msgstr ""
"Last-Translator: Ahmet Haydar Işık <itsahmthydr@gmail.com>, 2021\n"
"Language-Team: Turkish (https://www.transifex.com/voltaicideas/teams/116153/tr/)\n"
"Language: tr\n"
"Content-Type: text/plain; charset=UTF-8\n"
"Content-Transfer-Encoding: utf-8\n"
"Plural-Forms: nplurals=2; plural=(n > 1);\n"

#: qt/app.py:81
msgid "Quit"
msgstr "Çık"

#: qt/app.py:82 qt/preferences_dialog.py:116
#: cocoa/en.lproj/Localizable.strings:0
msgid "Options"
msgstr "Seçenekler"

#: qt/app.py:83 qt/ignore_list_dialog.py:32
#: cocoa/en.lproj/Localizable.strings:0
msgid "Ignore List"
msgstr "Yoksayılanlar Listesi"

#: qt/app.py:84 qt/app.py:179 cocoa/en.lproj/Localizable.strings:0
msgid "Clear Picture Cache"
msgstr "Resim Önbelleğini Temizle"

#: qt/app.py:85 cocoa/en.lproj/Localizable.strings:0
msgid "dupeGuru Help"
msgstr "dupeGuru Yardımı"

#: qt/app.py:86 cocoa/en.lproj/Localizable.strings:0
msgid "About dupeGuru"
msgstr "dupeGuru Hakkında"

#: qt/app.py:87
msgid "Open Debug Log"
msgstr "Hata Ayıklama Günlüğünü Aç"

#: qt/app.py:180 cocoa/en.lproj/Localizable.strings:0
msgid "Do you really want to remove all your cached picture analysis?"
msgstr ""
"Önbelleğe alınmış tüm resim analizlerinizi gerçekten kaldırmak istiyor "
"musunuz?"

#: qt/app.py:184
msgid "Picture cache cleared."
msgstr "Resim önbelleği temizlendi."

#: qt/app.py:251
msgid "{} file (*.{})"
msgstr "{} dosya (*.{})"

#: qt/deletion_options.py:30 cocoa/en.lproj/Localizable.strings:0
msgid "Deletion Options"
msgstr "Silme Seçenekleri"

#: qt/deletion_options.py:35 cocoa/en.lproj/Localizable.strings:0
msgid "Link deleted files"
msgstr "Silinen dosyaları bağla"

#: qt/deletion_options.py:37 cocoa/en.lproj/Localizable.strings:0
msgid ""
"After having deleted a duplicate, place a link targeting the reference file "
"to replace the deleted file."
msgstr ""
"Bir kopyayı sildikten sonra, silinen dosyayı değiştirmek için referans "
"dosyayı hedefleyen bir bağlantı yerleştirin."

#: qt/deletion_options.py:44
msgid "Hardlink"
msgstr "Hard link"

#: qt/deletion_options.py:44
msgid "Symlink"
msgstr "Sembolik link"

#: qt/deletion_options.py:48
msgid " (unsupported)"
msgstr " (desteklenmiyor)"

#: qt/deletion_options.py:49 cocoa/en.lproj/Localizable.strings:0
msgid "Directly delete files"
msgstr "Doğrudan dosyaları sil"

#: qt/deletion_options.py:51 cocoa/en.lproj/Localizable.strings:0
msgid ""
"Instead of sending files to trash, delete them directly. This option is "
"usually used as a workaround when the normal deletion method doesn't work."
msgstr ""
"Dosyaları Geri Dönüşüm Kutusuna göndermek yerine, onları doğrudan sil. Bu "
"seçenek genellikle normal silme yöntemi çalışmadığında geçici bir çözüm "
"olarak kullanılır."

#: qt/deletion_options.py:59 cocoa/en.lproj/Localizable.strings:0
msgid "Proceed"
msgstr "Devam Et"

#: qt/deletion_options.py:60 cocoa/en.lproj/Localizable.strings:0
msgid "Cancel"
msgstr "İptal Et"

#: qt/details_table.py:16 cocoa/en.lproj/Localizable.strings:0
msgid "Attribute"
msgstr "Özellik"

#: qt/details_table.py:16 cocoa/en.lproj/Localizable.strings:0
msgid "Selected"
msgstr "Seçili"

#: qt/details_table.py:16 qt/directories_model.py:24
#: cocoa/en.lproj/Localizable.strings:0
msgid "Reference"
msgstr "Referans"

#: qt/directories_dialog.py:64 cocoa/en.lproj/Localizable.strings:0
msgid "Load Results..."
msgstr "Sonuçları Yükle..."

#: qt/directories_dialog.py:65 cocoa/en.lproj/Localizable.strings:0
msgid "Results Window"
msgstr "Sonuç Penceresi"

#: qt/directories_dialog.py:66
msgid "Add Folder..."
msgstr "Dizini Ekle..."

#: qt/directories_dialog.py:74 qt/result_window.py:100
#: cocoa/en.lproj/Localizable.strings:0
msgid "File"
msgstr "Dosya"

#: qt/directories_dialog.py:76 qt/result_window.py:108
msgid "View"
msgstr "Görüntüle"

#: qt/directories_dialog.py:78 qt/result_window.py:110
#: cocoa/en.lproj/Localizable.strings:0
msgid "Help"
msgstr "Yardım"

#: qt/directories_dialog.py:80 cocoa/en.lproj/Localizable.strings:0
msgid "Load Recent Results"
msgstr "Son Sonuçları Yükle"

#: qt/directories_dialog.py:116 cocoa/en.lproj/Localizable.strings:0
msgid "Application Mode:"
msgstr "Uygulama Modu:"

#: qt/directories_dialog.py:121 cocoa/en.lproj/Localizable.strings:0
msgid "Music"
msgstr "Müzik"

#: qt/directories_dialog.py:121 cocoa/en.lproj/Localizable.strings:0
msgid "Picture"
msgstr "Resim"

#: qt/directories_dialog.py:121 cocoa/en.lproj/Localizable.strings:0
msgid "Standard"
msgstr "Standart"

#: qt/directories_dialog.py:128 cocoa/en.lproj/Localizable.strings:0
msgid "Scan Type:"
msgstr "Tarama Türü:"

#: qt/directories_dialog.py:135
msgid "More Options"
msgstr "Daha Fazla Seçenek"

#: qt/directories_dialog.py:139 cocoa/en.lproj/Localizable.strings:0
msgid "Select folders to scan and press \"Scan\"."
msgstr "Taranacak dizinleri seçin ve \"Tara\"ya basın."

#: qt/directories_dialog.py:163 cocoa/en.lproj/Localizable.strings:0
msgid "Load Results"
msgstr "Sonuçları Yükle"

#: qt/directories_dialog.py:166 cocoa/en.lproj/Localizable.strings:0
msgid "Scan"
msgstr "Tara"

#: qt/directories_dialog.py:230
msgid "Unsaved results"
msgstr "Kaydedilmeyen sonuçlar"

#: qt/directories_dialog.py:231 cocoa/en.lproj/Localizable.strings:0
msgid "You have unsaved results, do you really want to quit?"
msgstr "Kaydedilmeyen sonuçlarınız var, gerçekten çıkmak istiyor musunuz?"

#: qt/directories_dialog.py:239 cocoa/en.lproj/Localizable.strings:0
msgid "Select a folder to add to the scanning list"
msgstr "Tarama listesine eklemek için bir klasör seçin"

#: qt/directories_dialog.py:266 cocoa/en.lproj/Localizable.strings:0
msgid "Select a results file to load"
msgstr "Yüklenecek bir sonuç dosyası seçin"

#: qt/directories_dialog.py:267
msgid "All Files (*.*)"
msgstr "Tüm Dosyalar (*.*)"

#: qt/directories_dialog.py:267 qt/result_window.py:311
msgid "dupeGuru Results (*.dupeguru)"
msgstr "dupeGuru Sonuçları (*.dupeguru)"

#: qt/directories_dialog.py:278
msgid "Start a new scan"
msgstr "Yeni bir tarama başlat"

#: qt/directories_dialog.py:279 cocoa/en.lproj/Localizable.strings:0
msgid "You have unsaved results, do you really want to continue?"
msgstr ""
"Kaydedilmeyen sonuçlarınız var, gerçekten devam etmek istiyor musunuz?"

#: qt/directories_model.py:23 cocoa/en.lproj/Localizable.strings:0
msgid "Name"
msgstr "İsim"

#: qt/directories_model.py:23 cocoa/en.lproj/Localizable.strings:0
msgid "State"
msgstr "Durum"

#: qt/directories_model.py:24 cocoa/en.lproj/Localizable.strings:0
msgid "Excluded"
msgstr "Hariç Tutulan"

#: qt/directories_model.py:24 cocoa/en.lproj/Localizable.strings:0
msgid "Normal"
msgstr "Normal"

#: qt/ignore_list_dialog.py:45 cocoa/en.lproj/Localizable.strings:0
msgid "Remove Selected"
msgstr "Seçili Öğeyi Kaldır"

#: qt/ignore_list_dialog.py:46 cocoa/en.lproj/Localizable.strings:0
msgid "Clear"
msgstr "Temizle"

#: qt/ignore_list_dialog.py:47 qt/problem_dialog.py:61
#: cocoa/en.lproj/Localizable.strings:0
msgid "Close"
msgstr "Kapat"

#: qt/me/details_dialog.py:18 qt/pe/details_dialog.py:24
#: qt/result_window.py:56 qt/result_window.py:192 qt/se/details_dialog.py:18
#: cocoa/en.lproj/Localizable.strings:0
msgid "Details"
msgstr "Ayrıntılar"

#: qt/me/preferences_dialog.py:30 cocoa/en.lproj/Localizable.strings:0
msgid "Tags to scan:"
msgstr "Taranacak etiketler:"

#: qt/me/preferences_dialog.py:36 cocoa/en.lproj/Localizable.strings:0
msgid "Track"
msgstr "Parça"

#: qt/me/preferences_dialog.py:38 cocoa/en.lproj/Localizable.strings:0
msgid "Artist"
msgstr "Sanatçı"

#: qt/me/preferences_dialog.py:40 cocoa/en.lproj/Localizable.strings:0
msgid "Album"
msgstr "Albüm"

#: qt/me/preferences_dialog.py:42 cocoa/en.lproj/Localizable.strings:0
msgid "Title"
msgstr "Başlık"

#: qt/me/preferences_dialog.py:44 cocoa/en.lproj/Localizable.strings:0
msgid "Genre"
msgstr "Tarz"

#: qt/me/preferences_dialog.py:46 cocoa/en.lproj/Localizable.strings:0
msgid "Year"
msgstr "Yıl"

#: qt/me/preferences_dialog.py:50 qt/se/preferences_dialog.py:30
#: cocoa/en.lproj/Localizable.strings:0
msgid "Word weighting"
msgstr "Kelimeleri tartma"

#: qt/me/preferences_dialog.py:52 qt/se/preferences_dialog.py:32
#: cocoa/en.lproj/Localizable.strings:0
msgid "Match similar words"
msgstr "Benzer kelimeleri eşleştir"

#: qt/me/preferences_dialog.py:54 qt/pe/preferences_dialog.py:21
#: qt/se/preferences_dialog.py:34 cocoa/en.lproj/Localizable.strings:0
msgid "Can mix file kind"
msgstr "Dosya türünü karıştırabilir"

#: qt/me/preferences_dialog.py:56 qt/pe/preferences_dialog.py:23
#: qt/se/preferences_dialog.py:36 cocoa/en.lproj/Localizable.strings:0
msgid "Use regular expressions when filtering"
msgstr "Filtrelerken normal ifadeler kullan"

#: qt/me/preferences_dialog.py:58 qt/pe/preferences_dialog.py:25
#: qt/se/preferences_dialog.py:38 cocoa/en.lproj/Localizable.strings:0
msgid "Remove empty folders on delete or move"
msgstr "Silme veya taşıma sırasında boş klasörleri kaldır"

#: qt/me/preferences_dialog.py:60 qt/pe/preferences_dialog.py:27
#: qt/se/preferences_dialog.py:59 cocoa/en.lproj/Localizable.strings:0
msgid "Ignore duplicates hardlinking to the same file"
msgstr "Aynı dosyaya sabit bağlantı (hardlink) yapan kopyaları yoksay"

#: qt/me/preferences_dialog.py:62 qt/pe/preferences_dialog.py:29
#: qt/se/preferences_dialog.py:62 cocoa/en.lproj/Localizable.strings:0
msgid "Debug mode (restart required)"
msgstr "Hata ayıklama modu (yeniden başlatma gerektirir)"

#: qt/pe/preferences_dialog.py:19 cocoa/en.lproj/Localizable.strings:0
msgid "Match pictures of different dimensions"
msgstr "Farklı boyutlardaki resimleri eşleştir"

#: qt/preferences_dialog.py:43
msgid "Filter Hardness:"
msgstr "Filtre Sertliği:"

#: qt/preferences_dialog.py:69
msgid "More Results"
msgstr "Daha Fazla Sonuç"

#: qt/preferences_dialog.py:74
msgid "Fewer Results"
msgstr "Daha Az Sonuç"

#: qt/preferences_dialog.py:81
msgid "Font size:"
msgstr "Yazı boyutu:"

#: qt/preferences_dialog.py:85
msgid "Language:"
msgstr "Dil:"

#: qt/preferences_dialog.py:91 cocoa/en.lproj/Localizable.strings:0
msgid "Copy and Move:"
msgstr "Kopyala ve Taşı:"

#: qt/preferences_dialog.py:94 cocoa/en.lproj/Localizable.strings:0
msgid "Right in destination"
msgstr "Doğrudan hedef klasöre"

#: qt/preferences_dialog.py:95 cocoa/en.lproj/Localizable.strings:0
msgid "Recreate relative path"
msgstr "Göreli yolu yeniden oluştur"

#: qt/preferences_dialog.py:96 cocoa/en.lproj/Localizable.strings:0
msgid "Recreate absolute path"
msgstr "Mutlak yolu yeniden oluştur"

#: qt/preferences_dialog.py:99
msgid "Custom Command (arguments: %d for dupe, %r for ref):"
msgstr "Özel Komut (argümanlar: %d kopya için, %r referans için):"

#: qt/preferences_dialog.py:174
msgid "dupeGuru has to restart for language changes to take effect."
msgstr ""
"Dil değişikliklerinin etkili olması için dupeGuru'nun yeniden başlatılması "
"gerekir."

#: qt/prioritize_dialog.py:75 cocoa/en.lproj/Localizable.strings:0
msgid "Re-Prioritize duplicates"
msgstr "Kopyaları yeniden önceliklendirin"

#: qt/prioritize_dialog.py:79 cocoa/en.lproj/Localizable.strings:0
msgid ""
"Add criteria to the right box and click OK to send the dupes that correspond"
" the best to these criteria to their respective group's reference position. "
"Read the help file for more information."
msgstr ""
"Sağdaki kutuya kriter ekleyin ve bu kriterlere en iyi uyan kopyaları ilgili "
"grubun referans konumuna göndermek için Tamam'a tıklayın. Daha fazla bilgi "
"için yardım dosyasını okuyun."

#: qt/problem_dialog.py:33 cocoa/en.lproj/Localizable.strings:0
msgid "Problems!"
msgstr "Problemler!"

#: qt/problem_dialog.py:37 cocoa/en.lproj/Localizable.strings:0
msgid ""
"There were problems processing some (or all) of the files. The cause of "
"these problems are described in the table below. Those files were not "
"removed from your results."
msgstr ""
"Dosyaların bazılarını (veya tümünü) işlerken sorunlar oluştu. Bu sorunların "
"nedeni aşağıdaki tabloda açıklanmıştır. Bu dosyalar sonuçlarınızdan "
"kaldırılmadı."

#: qt/problem_dialog.py:56
msgid "Reveal Selected"
msgstr "Seçili Öğeyi Göster"

#: qt/result_window.py:57 qt/result_window.py:104 qt/result_window.py:167
#: qt/result_window.py:191 cocoa/en.lproj/Localizable.strings:0
msgid "Actions"
msgstr "Eylemler"

#: qt/result_window.py:58 cocoa/en.lproj/Localizable.strings:0
msgid "Show Dupes Only"
msgstr "Sadece Kopyaları Göster"

#: qt/result_window.py:59 cocoa/en.lproj/Localizable.strings:0
msgid "Show Delta Values"
msgstr "Delta Değerlerini Göster"

#: qt/result_window.py:60
msgid "Send Marked to Recycle Bin..."
msgstr "İşaretlileri Geri Dönüşüm Kutusuna Gönder..."

#: qt/result_window.py:61 cocoa/en.lproj/Localizable.strings:0
msgid "Move Marked to..."
msgstr "İşaretlileri Şuraya Taşı..."

#: qt/result_window.py:62 cocoa/en.lproj/Localizable.strings:0
msgid "Copy Marked to..."
msgstr "İşaretlileri Şuraya Kopyala..."

#: qt/result_window.py:63 cocoa/en.lproj/Localizable.strings:0
msgid "Remove Marked from Results"
msgstr "Sonuçlardan İşaretlileri Kaldır"

#: qt/result_window.py:64 cocoa/en.lproj/Localizable.strings:0
msgid "Re-Prioritize Results..."
msgstr "Sonuçları yeniden önceliklendirin..."

#: qt/result_window.py:67 cocoa/en.lproj/Localizable.strings:0
msgid "Remove Selected from Results"
msgstr "Sonuçlardan Seçili Öğeyi Kaldır"

#: qt/result_window.py:71 cocoa/en.lproj/Localizable.strings:0
msgid "Add Selected to Ignore List"
msgstr "Seçili Öğeyi Yok Sayılanlar Listesine Ekle"

#: qt/result_window.py:75 cocoa/en.lproj/Localizable.strings:0
msgid "Make Selected into Reference"
msgstr "Seçili Öğeyi Referans Yap"

#: qt/result_window.py:77 cocoa/en.lproj/Localizable.strings:0
msgid "Open Selected with Default Application"
msgstr "Seçili Öğeyi Varsayılan Uygulama ile Aç"

#: qt/result_window.py:80
msgid "Open Containing Folder of Selected"
msgstr "Seçili Öğenin Bulunduğu Klasörü Aç"

#: qt/result_window.py:82 cocoa/en.lproj/Localizable.strings:0
msgid "Rename Selected"
msgstr "Seçili Öğeyi Yeniden Adlandır"

#: qt/result_window.py:83 cocoa/en.lproj/Localizable.strings:0
msgid "Mark All"
msgstr "Tümünü İşaretle"

#: qt/result_window.py:84 cocoa/en.lproj/Localizable.strings:0
msgid "Mark None"
msgstr "Hiçbirini İşaretleme"

#: qt/result_window.py:85 cocoa/en.lproj/Localizable.strings:0
msgid "Invert Marking"
msgstr "İşaretleri Tersine Çevir"

#: qt/result_window.py:86 cocoa/en.lproj/Localizable.strings:0
msgid "Mark Selected"
msgstr "Seçili Öğeyi İşaretle"

#: qt/result_window.py:87
msgid "Export To HTML"
msgstr "HTML'ye Aktar"

#: qt/result_window.py:88
msgid "Export To CSV"
msgstr "CSV'ye Aktar"

#: qt/result_window.py:89 cocoa/en.lproj/Localizable.strings:0
msgid "Save Results..."
msgstr "Sonuçları Kaydet..."

#: qt/result_window.py:90 cocoa/en.lproj/Localizable.strings:0
msgid "Invoke Custom Command"
msgstr "Özel Komutu Çağır"

#: qt/result_window.py:102
msgid "Mark"
msgstr "İşaret"

#: qt/result_window.py:106 cocoa/en.lproj/Localizable.strings:0
msgid "Columns"
msgstr "Sütunlar"

#: qt/result_window.py:163
msgid "Reset to Defaults"
msgstr "Varsayılanlara Dön"

#: qt/result_window.py:185
msgid "{} Results"
msgstr "{} Sonuçlar"

#: qt/result_window.py:193 cocoa/en.lproj/Localizable.strings:0
msgid "Dupes Only"
msgstr "Sadece Kopyalar"

#: qt/result_window.py:194
msgid "Delta Values"
msgstr "Delta Değerleri"

#: qt/result_window.py:310 cocoa/en.lproj/Localizable.strings:0
msgid "Select a file to save your results to"
msgstr "Sonuçlarınızı kaydetmek için bir dosya seçin"

#: qt/se/preferences_dialog.py:41
msgid "Ignore files smaller than"
msgstr "Şu boyuttan küçük dosyaları yoksay"

#: qt/se/preferences_dialog.py:52 cocoa/en.lproj/Localizable.strings:0
msgid "KB"
msgstr "KB"

#: cocoa/en.lproj/Localizable.strings:0
msgid "%@ Results"
msgstr "%@ Sonuçlar"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Action"
msgstr "Eylem"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Add New Folder..."
msgstr "Yeni Klasör Ekle..."

#: cocoa/en.lproj/Localizable.strings:0
msgid "Advanced"
msgstr "Gelişmiş"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Automatically check for updates"
msgstr "Güncellemeleri otomatik olarak kontrol et"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Basic"
msgstr "Temel"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Bring All to Front"
msgstr "Tümünü Öne Getir"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Check for update..."
msgstr "Güncellemeleri kontrol et..."

#: cocoa/en.lproj/Localizable.strings:0
msgid "Close Window"
msgstr "Pencereyi Kapat"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Copy"
msgstr "Kopyala"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Custom command (arguments: %d for dupe, %r for ref):"
msgstr "Özel komut (argümanlar: %d kopya için, %r referans için):"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Cut"
msgstr "Kes"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Delta"
msgstr "Delta"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Details of Selected File"
msgstr "Seçili Dosyanın Ayrıntıları"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Details Panel"
msgstr "Ayrıntılar Paneli"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Directories"
msgstr "Dizinler"

#: cocoa/en.lproj/Localizable.strings:0
msgid "dupeGuru"
msgstr "dupeGuru"

#: cocoa/en.lproj/Localizable.strings:0
msgid "dupeGuru Preferences"
msgstr "dupeGuru Tercihleri"

#: cocoa/en.lproj/Localizable.strings:0
msgid "dupeGuru Results"
msgstr "dupeGuru Sonuçları"

#: cocoa/en.lproj/Localizable.strings:0
msgid "dupeGuru Website"
msgstr "dupeGuru Websitesi"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Edit"
msgstr "Düzenle"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Export Results to CSV"
msgstr "Sonuçları CSV'ye Aktar"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Export Results to XHTML"
msgstr "Sonuçları XHTML'ye Aktar"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Fewer results"
msgstr "Daha az sonuç"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Filter"
msgstr "Filtrele"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Filter hardness:"
msgstr "Filtre sertliği:"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Filter Results..."
msgstr "Sonuçları Filtrele..."

#: cocoa/en.lproj/Localizable.strings:0
msgid "Folder Selection Window"
msgstr "Klasör Seçim Penceresi"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Font Size:"
msgstr "Yazı Tipi Boyutu:"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Hide dupeGuru"
msgstr "dupeGuru'yu Gizle"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Hide Others"
msgstr "Diğerlerini Gizle"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Ignore files smaller than:"
msgstr "Şu boyuttan küçük dosyaları yoksay:"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Load from file..."
msgstr "Dosyadan yükle..."

#: cocoa/en.lproj/Localizable.strings:0
msgid "Minimize"
msgstr "Küçült"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Mode"
msgstr "Mod"

#: cocoa/en.lproj/Localizable.strings:0
msgid "More results"
msgstr "Daha fazla sonuç"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Ok"
msgstr "Tamam"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Paste"
msgstr "Yapıştır"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Preferences..."
msgstr "Tercihler..."

#: cocoa/en.lproj/Localizable.strings:0
msgid "Quick Look"
msgstr "Hızlı Bakış"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Quit dupeGuru"
msgstr "dupeGuru'dan Çık"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Reset to Default"
msgstr "Varsayılana Dön"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Reset To Defaults"
msgstr "Varsayılanlara Dön"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Reveal"
msgstr "Ortaya Çıkar"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Reveal Selected in Finder"
msgstr "Seçili Öğeyi Finder'da Göster"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Select All"
msgstr "Tümünü Seç"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Send Marked to Trash..."
msgstr "İşaretlileri Geri Dönüşüm Kutusuna Gönder..."

#: cocoa/en.lproj/Localizable.strings:0
msgid "Services"
msgstr "Hizmetler"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Show All"
msgstr "Tümünü Göster"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Start Duplicate Scan"
msgstr "Kopyaları Taramayı Başlat"

#: cocoa/en.lproj/Localizable.strings:0
msgid "The name '%@' already exists."
msgstr "'%@' adı zaten var."

#: cocoa/en.lproj/Localizable.strings:0
msgid "Window"
msgstr "Pencere"

#: cocoa/en.lproj/Localizable.strings:0
msgid "Zoom"
msgstr "Yakınlaştır"

#: qt\app.py:158
msgid "Exclusion Filters"
msgstr "Harici Bırakma Filtreleri"

#: qt\directories_dialog.py:91
msgid "Scan Results"
msgstr "Tarama Sonuçları"

#: qt\directories_dialog.py:95
msgid "Load Directories..."
msgstr "Dizinleri Yükle..."

#: qt\directories_dialog.py:96
msgid "Save Directories..."
msgstr "Dizinleri Kaydet..."

#: qt\directories_dialog.py:337
msgid "Select a directories file to load"
msgstr "Yüklenecek bir dizin dosyası seçin"

#: qt\directories_dialog.py:338
msgid "dupeGuru Results (*.dupegurudirs)"
msgstr "dupeGuru Sonuçları (*.dupegurudirs)"

#: qt\directories_dialog.py:347
msgid "Select a file to save your directories to"
msgstr "Dizinlerinizi kaydetmek için bir dosya seçin"

#: qt\directories_dialog.py:348
msgid "dupeGuru Directories (*.dupegurudirs)"
msgstr "dupeGuru Dizinleri (*.dupegurudirs)"

#: qt\exclude_list_dialog.py:44
msgid "Add"
msgstr "Ekle"

#: qt\exclude_list_dialog.py:46
msgid "Restore defaults"
msgstr "Varsayılanları geri yükle"

#: qt\exclude_list_dialog.py:47
msgid "Test string"
msgstr "Test dizisi"

#: qt\exclude_list_dialog.py:83
msgid "Type a python regular expression here..."
msgstr "Buraya bir Python düzenli ifadesi yazın..."

#: qt\exclude_list_dialog.py:85
msgid "Type a file system path or filename here..."
msgstr "Buraya bir dosya sistemi yolu veya dosya adı yazın..."

#: qt\exclude_list_dialog.py:152
msgid ""
"These (case sensitive) python regular expressions will filter out files during scans.<br>Directores will also have their <strong>default state</strong> set to Excluded in the Directories tab if their name happen to match one of the regular expressions.<br>For each file collected two tests are perfomed on each of them to determine whether or not to filter them out:<br><li>1. Regular expressions with no path separator in them will be compared to the file name only.</li>\n"
"<li>2. Regular expressions with no path separator in them will be compared to the full path to the file.</li><br>\n"
"Example: if you want to filter out .PNG files from the \"My Pictures\" directory only:<br><code>.*My\\sPictures\\\\.*\\.png</code><br><br>You can test the regular expression with the test string feature by pasting a fake path in it:<br><code>C:\\\\User\\My Pictures\\test.png</code><br><br>\n"
"Matching regular expressions will be highlighted.<br>If there is at least one highlight, the path tested will be ignored during scans.<br><br>Directories and files starting with a period '.' are filtered out by default.<br><br>"
msgstr ""
"Bu (büyük/küçük harfe duyarlı) python düzenli ifadeleri, taramalar sırasında dosyaları filtreleyecektir.<br>Dizinler ayrıca, adları seçilen düzenli ifadelerden biriyle eşleşirse, Dizinler sekmesinde<strong>varsayılan durumları</strong>Hariç Tutuldu olarak ayarlanır.<br>Toplanan her dosya için, tamamen göz ardı edilip edilmeyeceğini belirlemek için dosyaların her birinin üzerinde iki test yapılır:<br><li>1. İçinde yol ayırıcı olmayan düzenli ifadeler yalnızca dosya adıyla karşılaştırılır.</li>\n"
"<li>2. İçinde yol ayırıcı olmayan normal ifadeler, dosyanın tam yolu ile karşılaştırılacaktır.</li><br>\n"
"Örnek: .PNG dosyalarını yalnızca \"My Pictures\" dizininden filtrelemek istiyorsanız:<br><code>.*My\\sPictures\\\\.*\\.png</code><br><br>Düzenli ifadeyi, test dizesi özelliğinin içine sahte bir yol yapıştırarak test edebilirsiniz:<br><code>C:\\\\User\\My Pictures\\test.png</code><br><br>\n"
"Eşleşen normal ifadeler vurgulanacaktır.<br>En az bir vurgu varsa, test edilen yol taramalar sırasında yok sayılır<br><br>Nokta '.' ile başlayan dizinler ve dosyalar varsayılan olarak filtrelenir.<br><br>"

#: qt\exclude_list_table.py:36
msgid "Compilation error: "
msgstr "Derleme hatası: "

#: qt\pe\image_viewer.py:56
msgid "Increase zoom"
msgstr "Yakınlaştırmayı arttır"

#: qt\pe\image_viewer.py:66
msgid "Decrease zoom"
msgstr "Yakınlaştırmayı azalt"

#: qt\pe\image_viewer.py:71
msgid "Ctrl+/"
msgstr "Ctrl+/"

#: qt\pe\image_viewer.py:76
msgid "Normal size"
msgstr "Normal boyut"

#: qt\pe\image_viewer.py:81
msgid "Ctrl+*"
msgstr "Ctrl+*"

#: qt\pe\image_viewer.py:86
msgid "Best fit"
msgstr "En uygun"

#: qt\pe\preferences_dialog.py:49
msgid "Picture cache mode:"
msgstr "Resim önbellek modu:"

#: qt\pe\preferences_dialog.py:56
msgid "Override theme icons in viewer toolbar"
msgstr "Görüntüleyici araç çubuğundaki tema simgelerini geçersiz kıl"

#: qt\pe\preferences_dialog.py:58
msgid ""
"Use our own internal icons instead of those provided by the theme engine"
msgstr ""
"Tema motoru tarafından sağlananlar yerine kendi dahili simgelerimizi "
"kullanın"

#: qt\pe\preferences_dialog.py:66
msgid "Show scrollbars in image viewers"
msgstr "Resim görüntüleyicilerde kaydırma çubuklarını göster"

#: qt\pe\preferences_dialog.py:68
msgid ""
"When the image displayed doesn't fit the viewport, show scrollbars to span "
"the view around"
msgstr ""
"Görüntülenen görüntü görünüm alanına sığmadığında, görünümü etrafa yaymak "
"için kaydırma çubuklarını göster"

#: qt\preferences_dialog.py:156
msgid "Use default position for tab bar (requires restart)"
msgstr ""
"Sekme çubuğu için varsayılan konumu kullan (yeniden başlatma gerektirir)"

#: qt\preferences_dialog.py:158
msgid ""
"Place the tab bar below the main menu instead of next to it\n"
"On MacOS, the tab bar will fill up the window's width instead."
msgstr ""
"Sekme çubuğunu ana menünün yanına değil altına yerleştirin\n"
"MacOS'ta sekme çubuğu bunun yerine pencerenin genişliğini dolduracaktır."

#: qt\preferences_dialog.py:172
msgid "Use bold font for references"
msgstr "Referanslar için kalın yazı tipi kullanın"

#: qt\preferences_dialog.py:176
msgid "Reference foreground color:"
msgstr "Referans ön plan rengi:"

#: qt\preferences_dialog.py:179
msgid "Reference background color:"
msgstr "Referans arka plan rengi:"

#: qt\preferences_dialog.py:182 qt\preferences_dialog.py:216
msgid "Delta foreground color:"
msgstr "Delta ön plan rengi:"

#: qt\preferences_dialog.py:195
msgid "Show the title bar and can be docked"
msgstr "Başlık çubuğunu görüntüleyebilir ve sabitleyebilirsiniz."

#: qt\preferences_dialog.py:197
msgid ""
"While the title bar is hidden, use the modifier key to drag the floating "
"window around"
msgstr ""
"Başlık çubuğu gizliyken, kayan pencereyi etrafında sürüklemek için "
"değiştirici tuşu kullanın."

#: qt\preferences_dialog.py:199
msgid "The title bar can only be disabled while the window is docked"
msgstr ""
"Başlık çubuğu yalnızca pencere sabitlendiğinde devre dışı bırakılabilir"

#: qt\preferences_dialog.py:202
msgid "Vertical title bar"
msgstr "Dikey başlık çubuğu"

#: qt\preferences_dialog.py:204
msgid ""
"Change the title bar from horizontal on top, to vertical on the left side"
msgstr "Başlık çubuğunu üstte yataydan sol tarafta dikey olarak değiştirin"

#: qt\tabbed_window.py:44
msgid "Show tab bar"
msgstr "Sekme çubuğunu göster"

#: qt\exclude_list_dialog.py:152
msgid ""
"These (case sensitive) python regular expressions will filter out files during scans.<br>Directores will also have their <strong>default state</strong> set to Excluded in the Directories tab if their name happens to match one of the selected regular expressions.<br>For each file collected, two tests are performed to determine whether or not to completely ignore it:<br><li>1. Regular expressions with no path separator in them will be compared to the file name only.</li>\n"
"<li>2. Regular expressions with at least one path separator in them will be compared to the full path to the file.</li><br>\n"
"Example: if you want to filter out .PNG files from the \"My Pictures\" directory only:<br><code>.*My\\sPictures\\\\.*\\.png</code><br><br>You can test the regular expression with the \"test string\" button after pasting a fake path in the test field:<br><code>C:\\\\User\\My Pictures\\test.png</code><br><br>\n"
"Matching regular expressions will be highlighted.<br>If there is at least one highlight, the path or filename tested will be ignored during scans.<br><br>Directories and files starting with a period '.' are filtered out by default.<br><br>"
msgstr ""
"Bu (büyük/küçük harfe duyarlı) python düzenli ifadeleri, taramalar sırasında dosyaları filtreleyecektir.<br>Dizinler ayrıca, adları seçilen düzenli ifadelerden biriyle eşleşirse, Dizinler sekmesinde <strong>varsayılan durumları</strong> Hariç Tutuldu olarak ayarlanır.<br>Toplanan her dosya için, tamamen göz ardı edilip edilmeyeceğini belirlemek için iki test yapılır:<br><li>1. İçinde yol ayırıcı olmayan düzenli ifadeler yalnızca dosya adıyla karşılaştırılacaktır.</li>\n"
"<li>2. İçinde en az bir yol ayırıcı bulunan düzenli ifadeler, dosyanın tam yolu ile karşılaştırılacaktır.</li><br>\n"
"<br>Örnek: .PNG dosyalarını yalnızca \"My Pictures\" dizininden filtrelemek istiyorsanız:<code>.*My\\sPictures\\\\.*\\.png</code><br><br>Test alanına sahte bir yol yapıştırdıktan sonra normal ifadeyi \"test dizesi\" düğmesiyle test edebilirsiniz:<br><code>C:\\\\User\\My Pictures\\test.png</code><br><br>\n"
"Eşleşen normal ifadeler vurgulanacaktır.<br>En az bir vurgu varsa, test edilen yol veya dosya adı taramalar sırasında yok sayılır.<br><br>Nokta '.' ile başlayan dizinler ve dosyalar varsayılan olarak filtrelenir.<br><br>"

#: qt\app.py:256
msgid "Results"
msgstr "Sonuçlar"

#: qt\preferences_dialog.py:150
msgid "General Interface"
msgstr "Genel Arayüz"

#: qt\preferences_dialog.py:176
msgid "Result Table"
msgstr "Sonuç Tablosu"

#: qt\preferences_dialog.py:205
msgid "Details Window"
msgstr "Ayrıntı Penceresi"

#: qt\preferences_dialog.py:285
msgid "General"
msgstr "Genel"

#: qt\preferences_dialog.py:286
msgid "Display"
msgstr "Görüntüle"
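Once compiled to `ui.mo` with msgfmt, this catalog is what gettext serves at runtime; a minimal sketch, assuming the compiled file sits under `build/locale/tr/LC_MESSAGES/` (the layout implied by `LOCALE_DIR` in package.py further down):

```python
import gettext

# Load the Turkish "ui" domain; fallback=True returns the msgid untouched
# if the catalog (or a given string) is missing.
trans = gettext.translation("ui", localedir="build/locale", languages=["tr"], fallback=True)
_ = trans.gettext

print(_("Quit"))          # -> "Çık" when the catalog is present
print(_("Scan Results"))  # -> "Tarama Sonuçları"
```

The `Plural-Forms: nplurals=2; plural=(n > 1);` header only affects `ngettext` lookups; the entries above are all singular `gettext` messages.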
@@ -905,3 +905,21 @@ msgstr ""
 #: qt\preferences_dialog.py:286
 msgid "Display"
 msgstr ""
+
+#: qt\se\preferences_dialog.py:70
+msgid "Partially hash files bigger than"
+msgstr ""
+
+#: qt\se\preferences_dialog.py:80
+msgid "MB"
+msgstr ""
+
+#: qt\preferences_dialog.py:163
+msgid "Use native OS dialogs"
+msgstr ""
+
+#: qt\preferences_dialog.py:166
+msgid ""
+"For actions such as file/folder selection use the OS native dialogs.\n"
+"Some native dialogs have limited functionality."
+msgstr ""
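The empty `msgstr ""` values above are simply untranslated entries; gettext treats them as missing and falls back to the English msgid, the same behavior `NullTranslations` models:

```python
import gettext

# An untranslated (or absent) entry resolves to the original msgid:
catalog = gettext.NullTranslations()
print(catalog.gettext("Partially hash files bigger than"))
# -> "Partially hash files bigger than"
```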
2
macos.md
@@ -11,7 +11,7 @@
 1. Install Xcode if desired
 2. Install [Homebrew][homebrew], if not on the path after install (arm based Macs) create `~/.zshrc`
 with `export PATH="/opt/homebrew/bin:$PATH"`. Will need to reload terminal or source the file to take
-affect.
+effect.
 3. Install qt5 with `brew`. If you are using a version of macos without system python 3.6+ then you will
 also need to install that via brew or with pyenv.
 
87
package.py
@@ -26,6 +26,10 @@ from hscommon.build import (
     copy_all,
 )
 
+ENTRY_SCRIPT = "run.py"
+LOCALE_DIR = "build/locale"
+HELP_DIR = "build/help"
+
 
 def parse_args():
     parser = ArgumentParser()
@@ -33,6 +37,15 @@ def parse_args():
     return parser.parse_args()
 
 
+def check_loc_doc():
+    if not op.exists(LOCALE_DIR):
+        print('Locale files are missing. Have you run "build.py --loc"?')
+    # include help files if they are built otherwise exit as they should be included?
+    if not op.exists(HELP_DIR):
+        print('Help files are missing. Have you run "build.py --doc"?')
+    return op.exists(LOCALE_DIR) and op.exists(HELP_DIR)
+
+
 def copy_files_to_package(destpath, packages, with_so):
     # when with_so is true, we keep .so files in the package, and otherwise, we don't. We need this
     # flag because when building debian src pkg, we *don't* want .so files (they're compiled later)
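The new helper folds the locale and help existence checks, previously copy-pasted into each packaging function, into one place; the hunks below all adopt the same three-line guard, sketched here (the function name `package_anything` is just a placeholder):

```python
def package_anything():
    # Bail out early when build/locale or build/help is missing;
    # check_loc_doc() has already printed which build.py flag to run.
    if not check_loc_doc():
        print("Exiting...")
        return
    # ... platform-specific packaging steps ...
```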
@@ -40,17 +53,13 @@ def copy_files_to_package(destpath, packages, with_so):
     if op.exists(destpath):
         shutil.rmtree(destpath)
     os.makedirs(destpath)
-    shutil.copy("run.py", op.join(destpath, "run.py"))
+    shutil.copy(ENTRY_SCRIPT, op.join(destpath, ENTRY_SCRIPT))
     extra_ignores = ["*.so"] if not with_so else None
     copy_packages(packages, destpath, extra_ignores=extra_ignores)
     # include locale files if they are built otherwise exit as it will break
     # the localization
-    if not op.exists("build/locale"):
-        print('Locale files are missing. Have you run "build.py --loc"? Exiting...')
-        return
-    # include help files if they are built otherwise exit as they should be included?
-    if not op.exists("build/help"):
-        print('Help files are missing. Have you run "build.py --doc"? Exiting...')
+    if not check_loc_doc():
+        print("Exiting...")
         return
     shutil.copytree(op.join("build", "help"), op.join(destpath, "help"))
     shutil.copytree(op.join("build", "locale"), op.join(destpath, "locale"))
@@ -62,7 +71,7 @@ def package_debian_distribution(distribution):
     version = "{}~{}".format(app_version, distribution)
     destpath = op.join("build", "dupeguru-{}".format(version))
     srcpath = op.join(destpath, "src")
-    packages = ["hscommon", "core", "qtlib", "qt", "send2trash", "hsaudiotag"]
+    packages = ["hscommon", "core", "qtlib", "qt", "send2trash"]
     copy_files_to_package(srcpath, packages, with_so=False)
     os.mkdir(op.join(destpath, "modules"))
     copy_all(op.join("core", "pe", "modules", "*.*"), op.join(destpath, "modules"))
@@ -82,11 +91,7 @@ def package_debian_distribution(distribution):
     copy(op.join(debskel, fn), op.join(debdest, fn))
     filereplace(op.join(debskel, "control"), op.join(debdest, "control"), **debopts)
     filereplace(op.join(debskel, "Makefile"), op.join(destpath, "Makefile"), **debopts)
-    filereplace(
-        op.join(debskel, "dupeguru.desktop"),
-        op.join(debdest, "dupeguru.desktop"),
-        **debopts
-    )
+    filereplace(op.join(debskel, "dupeguru.desktop"), op.join(debdest, "dupeguru.desktop"), **debopts)
     changelogpath = op.join("help", "changelog")
     changelog_dest = op.join(debdest, "changelog")
     project_name = debopts["pkgname"]
@@ -117,22 +122,11 @@ def package_arch():
     # need to include them).
     print("Packaging for Arch")
     srcpath = op.join("build", "dupeguru-arch")
-    packages = [
-        "hscommon",
-        "core",
-        "qtlib",
-        "qt",
-        "send2trash",
-        "hsaudiotag",
-    ]
+    packages = ["hscommon", "core", "qtlib", "qt", "send2trash"]
     copy_files_to_package(srcpath, packages, with_so=True)
     shutil.copy(op.join("images", "dgse_logo_128.png"), srcpath)
     debopts = json.load(open(op.join("pkg", "arch", "dupeguru.json")))
-    filereplace(
-        op.join("pkg", "arch", "dupeguru.desktop"),
-        op.join(srcpath, "dupeguru.desktop"),
-        **debopts
-    )
+    filereplace(op.join("pkg", "arch", "dupeguru.desktop"), op.join(srcpath, "dupeguru.desktop"), **debopts)
 
 
 def package_source_txz():
@@ -160,12 +154,8 @@ def package_windows():
     arch = "x86"
     # include locale files if they are built otherwise exit as it will break
     # the localization
-    if not op.exists("build/locale"):
-        print('Locale files are missing. Have you run "build.py --loc"? Exiting...')
-        return
-    # include help files if they are built otherwise exit as they should be included?
-    if not op.exists("build/help"):
-        print('Help files are missing. Have you run "build.py --doc"? Exiting...')
+    if not check_loc_doc():
+        print("Exiting...")
         return
     # create version information file from template
     try:
@@ -173,11 +163,7 @@ def package_windows():
         version_info = version_template.read()
         version_template.close()
         version_info_file = open("win_version_info.txt", "w")
-        version_info_file.write(
-            version_info.format(
-                version_array[0], version_array[1], version_array[2], bits
-            )
-        )
+        version_info_file.write(version_info.format(version_array[0], version_array[1], version_array[2], bits))
         version_info_file.close()
     except Exception:
         print("Error creating version info file, exiting...")
@@ -192,13 +178,11 @@ def package_windows():
             "--windowed",
             "--noconfirm",
             "--icon=images/dgse_logo.ico",
-            "--add-data=build/locale;locale",
-            "--add-data=build/help;help",
+            "--add-data={0};locale".format(LOCALE_DIR),
+            "--add-data={0};help".format(HELP_DIR),
             "--version-file=win_version_info.txt",
-            "--paths=C:\\Program Files (x86)\\Windows Kits\\10\\Redist\\ucrt\\DLLs\\{0}".format(
-                arch
-            ),
-            "run.py",
+            "--paths=C:\\Program Files (x86)\\Windows Kits\\10\\Redist\\ucrt\\DLLs\\{0}".format(arch),
+            ENTRY_SCRIPT,
         ]
     )
     # remove version info file
@@ -214,12 +198,8 @@ def package_windows():
 def package_macos():
     # include locale files if they are built otherwise exit as it will break
     # the localization
-    if not op.exists("build/locale"):
-        print('Locale files are missing. Have you run "build.py --loc"? Exiting...')
-        return
-    # include help files if they are built otherwise exit as they should be included?
-    if not op.exists("build/help"):
-        print('Help files are missing. Have you run "build.py --doc"? Exiting...')
+    if not check_loc_doc():
+        print("Exiting")
         return
     # run pyinstaller from here:
     import PyInstaller.__main__
@@ -231,9 +211,9 @@ def package_macos():
             "--noconfirm",
             "--icon=images/dupeguru.icns",
             "--osx-bundle-identifier=com.hardcoded-software.dupeguru",
-            "--add-data=build/locale:locale",
-            "--add-data=build/help:help",
-            "run.py",
+            "--add-data={0}:locale".format(LOCALE_DIR),
+            "--add-data={0}:help".format(HELP_DIR),
+            ENTRY_SCRIPT,
         ]
     )
 
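Note that the only difference between the Windows and macOS `--add-data` arguments is the separator: PyInstaller splits `src;dest` on Windows and `src:dest` elsewhere, which matches the platform's `os.pathsep`. A small sketch of computing it once instead of hard-coding both spellings (an alternative pattern, not what the diff itself does):

```python
import os

# os.pathsep is ";" on Windows and ":" on POSIX, the same split
# character PyInstaller expects in --add-data.
add_data_locale = "--add-data={0}{1}locale".format("build/locale", os.pathsep)
```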
@@ -246,7 +226,8 @@ def main():
         return
     print("Packaging dupeGuru with UI qt")
     if sys.platform == "win32":
-        package_windows()
+        package_debian()
+        # package_windows()
     elif sys.platform == "darwin":
         package_macos()
     else:
@@ -6,19 +6,19 @@ import importlib
 
 from setuptools import setup, Extension
 
-sys.path.insert(1, op.abspath('src'))
+sys.path.insert(1, op.abspath("src"))
 
 from hscommon.build import move_all
 
 exts = [
-    Extension("_block", [op.join('modules', 'block.c'), op.join('modules', 'common.c')]),
-    Extension("_cache", [op.join('modules', 'cache.c'), op.join('modules', 'common.c')]),
-    Extension("_block_qt", [op.join('modules', 'block_qt.c')]),
+    Extension("_block", [op.join("modules", "block.c"), op.join("modules", "common.c")]),
+    Extension("_cache", [op.join("modules", "cache.c"), op.join("modules", "common.c")]),
+    Extension("_block_qt", [op.join("modules", "block_qt.c")]),
 ]
 setup(
-    script_args = ['build_ext', '--inplace'],
-    ext_modules = exts,
+    script_args=["build_ext", "--inplace"],
+    ext_modules=exts,
 )
-move_all('_block_qt*', op.join('src', 'qt', 'pe'))
-move_all('_cache*', op.join('src', 'core/pe'))
-move_all('_block*', op.join('src', 'core/pe'))
+move_all("_block_qt*", op.join("src", "qt", "pe"))
+move_all("_cache*", op.join("src", "core/pe"))
+move_all("_block*", op.join("src", "core/pe"))
5
pyproject.toml
Normal file
@@ -0,0 +1,5 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[tool.black]
line-length = 120
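The `[tool.black]` table is what produces the collapsed one-liners seen throughout this diff: with `line-length = 120`, calls that previously wrapped at black's default 88 columns now fit on one line. A small sketch using black's Python API (`black.format_str` and `black.Mode` exist in black's public module; the usual entry point is simply `black .` on the command line):

```python
import black

# Reformat a snippet under the project's 120-column limit.
src = 'filereplace(op.join(debskel, "dupeguru.desktop"), op.join(debdest, "dupeguru.desktop"), **debopts)\n'
print(black.format_str(src, mode=black.Mode(line_length=120)))
```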
98
qt/app.py
@@ -16,7 +16,7 @@ from hscommon import desktop
 
 from qtlib.about_box import AboutBox
 from qtlib.recent import Recent
-from qtlib.util import createActions
+from qtlib.util import create_actions
 from qtlib.progress_window import ProgressWindow
 
 from core.app import AppMode, DupeGuru as DupeGuruModel
@@ -52,7 +52,7 @@ class DupeGuru(QObject):
         # Enable tabs instead of separate floating windows for each dialog
         # Could be passed as an argument to this class if we wanted
         self.use_tabs = True
-        self.model = DupeGuruModel(view=self)
+        self.model = DupeGuruModel(view=self, portable=self.prefs.portable)
         self._setup()
 
     # --- Private
@@ -65,18 +65,10 @@ class DupeGuru(QObject):
         self.recentResults.mustOpenItem.connect(self.model.load_from)
         self.resultWindow = None
         if self.use_tabs:
-            self.main_window = (
-                TabBarWindow(self)
-                if not self.prefs.tabs_default_pos
-                else TabWindow(self)
-            )
+            self.main_window = TabBarWindow(self) if not self.prefs.tabs_default_pos else TabWindow(self)
             parent_window = self.main_window
-            self.directories_dialog = self.main_window.createPage(
-                "DirectoriesDialog", app=self
-            )
-            self.main_window.addTab(
-                self.directories_dialog, tr("Directories"), switch=False
-            )
+            self.directories_dialog = self.main_window.createPage("DirectoriesDialog", app=self)
+            self.main_window.addTab(self.directories_dialog, tr("Directories"), switch=False)
             self.actionDirectoriesWindow.setEnabled(False)
         else:  # floating windows only
             self.main_window = None
@@ -84,9 +76,7 @@ class DupeGuru(QObject):
|
|||||||
parent_window = self.directories_dialog
|
parent_window = self.directories_dialog
|
||||||
|
|
||||||
self.progress_window = ProgressWindow(parent_window, self.model.progress_window)
|
self.progress_window = ProgressWindow(parent_window, self.model.progress_window)
|
||||||
self.problemDialog = ProblemDialog(
|
self.problemDialog = ProblemDialog(parent=parent_window, model=self.model.problem_dialog)
|
||||||
parent=parent_window, model=self.model.problem_dialog
|
|
||||||
)
|
|
||||||
if self.use_tabs:
|
if self.use_tabs:
|
||||||
self.ignoreListDialog = self.main_window.createPage(
|
self.ignoreListDialog = self.main_window.createPage(
|
||||||
"IgnoreListDialog",
|
"IgnoreListDialog",
|
||||||
@@ -101,16 +91,10 @@ class DupeGuru(QObject):
|
|||||||
model=self.model.exclude_list_dialog,
|
model=self.model.exclude_list_dialog,
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
self.ignoreListDialog = IgnoreListDialog(
|
self.ignoreListDialog = IgnoreListDialog(parent=parent_window, model=self.model.ignore_list_dialog)
|
||||||
parent=parent_window, model=self.model.ignore_list_dialog
|
self.excludeDialog = ExcludeListDialog(app=self, parent=parent_window, model=self.model.exclude_list_dialog)
|
||||||
)
|
|
||||||
self.excludeDialog = ExcludeListDialog(
|
|
||||||
app=self, parent=parent_window, model=self.model.exclude_list_dialog
|
|
||||||
)
|
|
||||||
|
|
||||||
self.deletionOptions = DeletionOptions(
|
self.deletionOptions = DeletionOptions(parent=parent_window, model=self.model.deletion_options)
|
||||||
parent=parent_window, model=self.model.deletion_options
|
|
||||||
)
|
|
||||||
self.about_box = AboutBox(parent_window, self)
|
self.about_box = AboutBox(parent_window, self)
|
||||||
|
|
||||||
parent_window.show()
|
parent_window.show()
|
||||||
@@ -168,26 +152,27 @@ class DupeGuru(QObject):
|
|||||||
self.openDebugLogTriggered,
|
self.openDebugLogTriggered,
|
||||||
),
|
),
|
||||||
]
|
]
|
||||||
createActions(ACTIONS, self)
|
create_actions(ACTIONS, self)
|
||||||
|
|
||||||
def _update_options(self):
|
def _update_options(self):
|
||||||
self.model.options["mix_file_kind"] = self.prefs.mix_file_kind
|
self.model.options["mix_file_kind"] = self.prefs.mix_file_kind
|
||||||
self.model.options["escape_filter_regexp"] = not self.prefs.use_regexp
|
self.model.options["escape_filter_regexp"] = not self.prefs.use_regexp
|
||||||
self.model.options["clean_empty_dirs"] = self.prefs.remove_empty_folders
|
self.model.options["clean_empty_dirs"] = self.prefs.remove_empty_folders
|
||||||
self.model.options[
|
self.model.options["ignore_hardlink_matches"] = self.prefs.ignore_hardlink_matches
|
||||||
"ignore_hardlink_matches"
|
|
||||||
] = self.prefs.ignore_hardlink_matches
|
|
||||||
self.model.options["copymove_dest_type"] = self.prefs.destination_type
|
self.model.options["copymove_dest_type"] = self.prefs.destination_type
|
||||||
self.model.options["scan_type"] = self.prefs.get_scan_type(self.model.app_mode)
|
self.model.options["scan_type"] = self.prefs.get_scan_type(self.model.app_mode)
|
||||||
self.model.options["min_match_percentage"] = self.prefs.filter_hardness
|
self.model.options["min_match_percentage"] = self.prefs.filter_hardness
|
||||||
self.model.options["word_weighting"] = self.prefs.word_weighting
|
self.model.options["word_weighting"] = self.prefs.word_weighting
|
||||||
self.model.options["match_similar_words"] = self.prefs.match_similar
|
self.model.options["match_similar_words"] = self.prefs.match_similar
|
||||||
threshold = (
|
threshold = self.prefs.small_file_threshold if self.prefs.ignore_small_files else 0
|
||||||
self.prefs.small_file_threshold if self.prefs.ignore_small_files else 0
|
self.model.options["size_threshold"] = threshold * 1024 # threshold is in KB. The scanner wants bytes
|
||||||
|
big_file_size_threshold = self.prefs.big_file_size_threshold if self.prefs.big_file_partial_hashes else 0
|
||||||
|
self.model.options["big_file_size_threshold"] = (
|
||||||
|
big_file_size_threshold
|
||||||
|
* 1024
|
||||||
|
* 1024
|
||||||
|
# threshold is in MiB. The scanner wants bytes
|
||||||
)
|
)
|
||||||
self.model.options["size_threshold"] = (
|
|
||||||
threshold * 1024
|
|
||||||
) # threshold is in KB. the scanner wants bytes
|
|
||||||
scanned_tags = set()
|
scanned_tags = set()
|
||||||
if self.prefs.scan_tag_track:
|
if self.prefs.scan_tag_track:
|
||||||
scanned_tags.add("track")
|
scanned_tags.add("track")
|
||||||
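The two thresholds above use different units: `size_threshold` is stored in KB and multiplied by 1024 once, while the new `big_file_size_threshold` is stored in MiB and multiplied by 1024 twice. A worked check of the arithmetic (the preference values are hypothetical):

    # Hypothetical preference values, converted as in _update_options() above.
    small_kb = 10   # "ignore small files" threshold, in KB
    big_mib = 100   # "partial hashes for big files" threshold, in MiB

    assert small_kb * 1024 == 10_240              # size_threshold, in bytes
    assert big_mib * 1024 * 1024 == 104_857_600   # big_file_size_threshold, in bytes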
@@ -210,17 +195,17 @@ class DupeGuru(QObject):

     # --- Private
     def _get_details_dialog_class(self):
-        if self.model.app_mode == AppMode.Picture:
+        if self.model.app_mode == AppMode.PICTURE:
             return DetailsDialogPicture
-        elif self.model.app_mode == AppMode.Music:
+        elif self.model.app_mode == AppMode.MUSIC:
             return DetailsDialogMusic
         else:
             return DetailsDialogStandard

     def _get_preferences_dialog_class(self):
-        if self.model.app_mode == AppMode.Picture:
+        if self.model.app_mode == AppMode.PICTURE:
             return PreferencesDialogPicture
-        elif self.model.app_mode == AppMode.Music:
+        elif self.model.app_mode == AppMode.MUSIC:
             return PreferencesDialogMusic
         else:
             return PreferencesDialogStandard
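The `AppMode.Picture` → `AppMode.PICTURE` renames in this hunk (and in the directories dialog below) follow the PEP 8 convention of upper-case names for enum-style constants. The shape of the renamed constants, sketched (the member values here are illustrative only, not taken from `core.app`):

    # Sketch of the renamed constants; values are illustrative only.
    from enum import Enum

    class AppMode(Enum):
        STANDARD = 0
        MUSIC = 1
        PICTURE = 2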
@@ -252,9 +237,7 @@ class DupeGuru(QObject):
         if self.resultWindow is not None:
             if self.use_tabs:
                 if self.main_window.indexOfWidget(self.resultWindow) < 0:
-                    self.main_window.addTab(
-                        self.resultWindow, tr("Results"), switch=True
-                    )
+                    self.main_window.addTab(self.resultWindow, tr("Results"), switch=True)
                     return
                 self.main_window.showTab(self.resultWindow)
             else:
@@ -271,6 +254,9 @@ class DupeGuru(QObject):
         self.willSavePrefs.emit()
         self.prefs.save()
         self.model.save()
+        # Workaround for #857, hide() or close().
+        if self.details_dialog is not None:
+            self.details_dialog.close()
         QApplication.quit()

     # --- Signals
@@ -291,6 +277,12 @@ class DupeGuru(QObject):
                 "Wrong Locale",
                 msg,
             )
+        # Load results on open if passed a .dupeguru file
+        if len(sys.argv) > 1:
+            results = sys.argv[1]
+            if results.endswith(".dupeguru"):
+                self.model.load_from(results)
+                self.recentResults.insertItem(results)

     def clearPictureCacheTriggered(self):
         title = tr("Clear Picture Cache")
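The block added above wires up "open with document" behavior: when the process is launched with a `.dupeguru` results file as its first argument, the results are loaded and remembered. The check in isolation (`load_results` is a hypothetical callback standing in for `self.model.load_from`):

    # Standalone sketch of the argv check added above.
    import sys

    def maybe_load_results(load_results):
        if len(sys.argv) > 1 and sys.argv[1].endswith(".dupeguru"):
            load_results(sys.argv[1])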
@@ -308,9 +300,7 @@ class DupeGuru(QObject):

     def excludeListTriggered(self):
         if self.use_tabs:
-            self.showTriggeredTabbedDialog(
-                self.excludeListDialog, tr("Exclusion Filters")
-            )
+            self.showTriggeredTabbedDialog(self.excludeListDialog, tr("Exclusion Filters"))
         else:  # floating windows
             self.model.exclude_list_dialog.show()
@@ -318,16 +308,14 @@ class DupeGuru(QObject):
         """Add tab for dialog, name the tab with desc_string, then show it."""
         index = self.main_window.indexOfWidget(dialog)
         # Create the tab if it doesn't exist already
-        if (
-            index < 0
-        ):  # or (not dialog.isVisible() and not self.main_window.isTabVisible(index)):
+        if index < 0:  # or (not dialog.isVisible() and not self.main_window.isTabVisible(index)):
             index = self.main_window.addTab(dialog, desc_string, switch=True)
         # Show the tab for that widget
         self.main_window.setCurrentIndex(index)

     def openDebugLogTriggered(self):
-        debugLogPath = op.join(self.model.appdata, "debug.log")
-        desktop.open_path(debugLogPath)
+        debug_log_path = op.join(self.model.appdata, "debug.log")
+        desktop.open_path(debug_log_path)

     def preferencesTriggered(self):
         preferences_dialog = self._get_preferences_dialog_class()(
@@ -392,13 +380,9 @@ class DupeGuru(QObject):
         if self.resultWindow is not None:
             self.resultWindow.close()
             # This is better for tabs, as it takes care of duplicate items in menu bar
-            self.resultWindow.deleteLater() if self.use_tabs else self.resultWindow.setParent(
-                None
-            )
+            self.resultWindow.deleteLater() if self.use_tabs else self.resultWindow.setParent(None)
         if self.use_tabs:
-            self.resultWindow = self.main_window.createPage(
-                "ResultWindow", parent=self.main_window, app=self
-            )
+            self.resultWindow = self.main_window.createPage("ResultWindow", parent=self.main_window, app=self)
         else:  # We don't use a tab widget, regular floating QMainWindow
             self.resultWindow = ResultWindow(self.directories_dialog, self)
         self.directories_dialog._updateActionsState()
@@ -416,9 +400,7 @@ class DupeGuru(QObject):

     def select_dest_file(self, prompt, extension):
         files = tr("{} file (*.{})").format(extension.upper(), extension)
-        destination, chosen_filter = QFileDialog.getSaveFileName(
-            self.resultWindow, prompt, "", files
-        )
+        destination, chosen_filter = QFileDialog.getSaveFileName(self.resultWindow, prompt, "", files)
         if not destination.endswith(".{}".format(extension)):
             destination = "{}.{}".format(destination, extension)
         return destination
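`select_dest_file()` above guarantees the chosen save path carries the expected suffix even when the user types a bare name. The guard in isolation:

    # The extension guard from select_dest_file(), standalone.
    def ensure_extension(path, extension):
        if not path.endswith(".{}".format(extension)):
            path = "{}.{}".format(path, extension)
        return path

    assert ensure_extension("results", "csv") == "results.csv"
    assert ensure_extension("results.csv", "csv") == "results.csv"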
@@ -42,9 +42,7 @@ class DeletionOptions(QDialog):
         self.linkMessageLabel = QLabel(text)
         self.linkMessageLabel.setWordWrap(True)
         self.verticalLayout.addWidget(self.linkMessageLabel)
-        self.linkTypeRadio = RadioBox(
-            items=[tr("Symlink"), tr("Hardlink")], spread=False
-        )
+        self.linkTypeRadio = RadioBox(items=[tr("Symlink"), tr("Hardlink")], spread=False)
         self.verticalLayout.addWidget(self.linkTypeRadio)
         if not self.model.supports_links():
             self.linkCheckbox.setEnabled(False)
@@ -31,8 +31,7 @@ class DetailsDialog(QDockWidget):
         self.model.view = self
         self.app.willSavePrefs.connect(self.appWillSavePrefs)
         # self.setAttribute(Qt.WA_DeleteOnClose)
-        parent.addDockWidget(
-            area if self._wasDocked else Qt.BottomDockWidgetArea, self)
+        parent.addDockWidget(area if self._wasDocked else Qt.BottomDockWidgetArea, self)

     def _setupUi(self):  # Virtual
         pass

@@ -51,7 +50,7 @@ class DetailsDialog(QDockWidget):
         if not self.titleBarWidget():  # default title bar
             self.setTitleBarWidget(QWidget())  # disables title bar
             # Windows (and MacOS?) users cannot move a floating window which
-            # has not native decoration so we force it to dock for now
+            # has no native decoration so we force it to dock for now
             if not ISLINUX:
                 self.setFloating(False)
         elif self.titleBarWidget() is not None:  # title bar is disabled
@@ -34,9 +34,11 @@ class DetailsModel(QAbstractTableModel):
         row = index.row()

         ignored_fields = ["Dupe Count"]
-        if (self.model.row(row)[0] in ignored_fields
+        if (
+            self.model.row(row)[0] in ignored_fields
             or self.model.row(row)[1] == "---"
-            or self.model.row(row)[2] == "---"):
+            or self.model.row(row)[2] == "---"
+        ):
             if role != Qt.DisplayRole:
                 return None
             return self.model.row(row)[column]

@@ -52,17 +54,9 @@ class DetailsModel(QAbstractTableModel):
         return None  # QVariant()

     def headerData(self, section, orientation, role):
-        if (
-            orientation == Qt.Horizontal
-            and role == Qt.DisplayRole
-            and section < len(HEADER)
-        ):
+        if orientation == Qt.Horizontal and role == Qt.DisplayRole and section < len(HEADER):
             return HEADER[section]
-        elif (
-            orientation == Qt.Vertical
-            and role == Qt.DisplayRole
-            and section < self.model.row_count()
-        ):
+        elif orientation == Qt.Vertical and role == Qt.DisplayRole and section < self.model.row_count():
             # Read "Attribute" cell for horizontal header
             return self.model.row(section)[0]
         return None
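Both `headerData()` branches above implement the same Qt contract: the view asks the model for a header label per section and role, and returning `None` falls back to the defaults. The horizontal branch in isolation (assumes PyQt5 is installed; this `HEADER` list is a hypothetical stand-in for the one defined in the module):

    # Sketch of the headerData() contract; HEADER is a hypothetical stand-in.
    from PyQt5.QtCore import Qt

    HEADER = ["Attribute", "Selected", "Reference"]

    def header_text(section, orientation, role):
        if orientation == Qt.Horizontal and role == Qt.DisplayRole and section < len(HEADER):
            return HEADER[section]
        return None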
@@ -6,6 +6,7 @@

 from PyQt5.QtCore import QRect, Qt
 from PyQt5.QtWidgets import (
+    QListView,
     QWidget,
     QFileDialog,
     QHeaderView,

@@ -28,7 +29,7 @@ from hscommon.trans import trget
 from core.app import AppMode
 from qtlib.radio_box import RadioBox
 from qtlib.recent import Recent
-from qtlib.util import moveToScreenCenter, createActions
+from qtlib.util import move_to_screen_center, create_actions

 from . import platform
 from .directories_model import DirectoriesModel, DirectoriesDelegate

@@ -45,9 +46,7 @@ class DirectoriesDialog(QMainWindow):
         self.recentFolders = Recent(self.app, "recentFolders")
         self._setupUi()
         self._updateScanTypeList()
-        self.directoriesModel = DirectoriesModel(
-            self.app.model.directory_tree, view=self.treeView
-        )
+        self.directoriesModel = DirectoriesModel(self.app.model.directory_tree, view=self.treeView)
         self.directoriesDelegate = DirectoriesDelegate()
         self.treeView.setItemDelegate(self.directoriesDelegate)
         self._setupColumns()

@@ -95,7 +94,7 @@ class DirectoriesDialog(QMainWindow):
             ("actionLoadDirectories", "", "", tr("Load Directories..."), self.loadDirectoriesTriggered),
             ("actionSaveDirectories", "", "", tr("Save Directories..."), self.saveDirectoriesTriggered),
         ]
-        createActions(ACTIONS, self)
+        create_actions(ACTIONS, self)
         if self.app.use_tabs:
             # Keep track of actions which should only be accessible from this window
             self.specific_actions.add(self.actionLoadDirectories)

@@ -170,9 +169,7 @@ class DirectoriesDialog(QMainWindow):
         label = QLabel(tr("Application Mode:"), self)
         label.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
         hl.addWidget(label)
-        self.appModeRadioBox = RadioBox(
-            self, items=[tr("Standard"), tr("Music"), tr("Picture")], spread=False
-        )
+        self.appModeRadioBox = RadioBox(self, items=[tr("Standard"), tr("Music"), tr("Picture")], spread=False)
         hl.addWidget(self.appModeRadioBox)
         self.verticalLayout.addLayout(hl)
         hl = QHBoxLayout()

@@ -181,27 +178,21 @@ class DirectoriesDialog(QMainWindow):
         label.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
         hl.addWidget(label)
         self.scanTypeComboBox = QComboBox(self)
-        self.scanTypeComboBox.setSizePolicy(
-            QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed)
-        )
+        self.scanTypeComboBox.setSizePolicy(QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Fixed))
         self.scanTypeComboBox.setMaximumWidth(400)
         hl.addWidget(self.scanTypeComboBox)
         self.showPreferencesButton = QPushButton(tr("More Options"), self.centralwidget)
         self.showPreferencesButton.setSizePolicy(QSizePolicy.Fixed, QSizePolicy.Fixed)
         hl.addWidget(self.showPreferencesButton)
         self.verticalLayout.addLayout(hl)
-        self.promptLabel = QLabel(
-            tr('Select folders to scan and press "Scan".'), self.centralwidget
-        )
+        self.promptLabel = QLabel(tr('Select folders to scan and press "Scan".'), self.centralwidget)
         self.verticalLayout.addWidget(self.promptLabel)
         self.treeView = QTreeView(self.centralwidget)
         self.treeView.setSelectionMode(QAbstractItemView.ExtendedSelection)
         self.treeView.setSelectionBehavior(QAbstractItemView.SelectRows)
         self.treeView.setAcceptDrops(True)
         triggers = (
-            QAbstractItemView.DoubleClicked
-            | QAbstractItemView.EditKeyPressed
-            | QAbstractItemView.SelectedClicked
+            QAbstractItemView.DoubleClicked | QAbstractItemView.EditKeyPressed | QAbstractItemView.SelectedClicked
         )
         self.treeView.setEditTriggers(triggers)
         self.treeView.setDragDropOverwriteMode(True)

@@ -216,8 +207,8 @@ class DirectoriesDialog(QMainWindow):
         self.addFolderButton = QPushButton(self.centralwidget)
         self.addFolderButton.setIcon(QIcon(QPixmap(":/plus")))
         self.horizontalLayout.addWidget(self.addFolderButton)
-        spacerItem1 = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
-        self.horizontalLayout.addItem(spacerItem1)
+        spacer_item = QSpacerItem(40, 20, QSizePolicy.Expanding, QSizePolicy.Minimum)
+        self.horizontalLayout.addItem(spacer_item)
         self.loadResultsButton = QPushButton(self.centralwidget)
         self.loadResultsButton.setText(tr("Load Results"))
         self.horizontalLayout.addWidget(self.loadResultsButton)

@@ -234,7 +225,7 @@ class DirectoriesDialog(QMainWindow):
         if self.app.prefs.directoriesWindowRect is not None:
             self.setGeometry(self.app.prefs.directoriesWindowRect)
         else:
-            moveToScreenCenter(self)
+            move_to_screen_center(self)

     def _setupColumns(self):
         header = self.treeView.header()

@@ -267,9 +258,7 @@ class DirectoriesDialog(QMainWindow):

     def _updateScanTypeList(self):
         try:
-            self.scanTypeComboBox.currentIndexChanged[int].disconnect(
-                self.scanTypeChanged
-            )
+            self.scanTypeComboBox.currentIndexChanged[int].disconnect(self.scanTypeChanged)
         except TypeError:
             # Not connected, ignore
             pass
@@ -297,24 +286,33 @@ class DirectoriesDialog(QMainWindow):

     # --- Events
     def addFolderTriggered(self):
+        no_native = not self.app.prefs.use_native_dialogs
         title = tr("Select a folder to add to the scanning list")
-        flags = QFileDialog.ShowDirsOnly
-        dirpath = str(
-            QFileDialog.getExistingDirectory(self, title, self.lastAddedFolder, flags)
-        )
-        if not dirpath:
+        file_dialog = QFileDialog(self, title, self.lastAddedFolder)
+        file_dialog.setFileMode(QFileDialog.DirectoryOnly)
+        file_dialog.setOption(QFileDialog.DontUseNativeDialog, no_native)
+        if no_native:
+            file_view = file_dialog.findChild(QListView, "listView")
+            if file_view:
+                file_view.setSelectionMode(QAbstractItemView.MultiSelection)
+            f_tree_view = file_dialog.findChild(QTreeView)
+            if f_tree_view:
+                f_tree_view.setSelectionMode(QAbstractItemView.MultiSelection)
+        if not file_dialog.exec():
             return
-        self.lastAddedFolder = dirpath
-        self.app.model.add_directory(dirpath)
-        self.recentFolders.insertItem(dirpath)
+        paths = file_dialog.selectedFiles()
+        self.lastAddedFolder = paths[-1]
+        [self.app.model.add_directory(path) for path in paths]
+        [self.recentFolders.insertItem(path) for path in paths]

     def appModeButtonSelected(self, index):
         if index == 2:
-            mode = AppMode.Picture
+            mode = AppMode.PICTURE
         elif index == 1:
-            mode = AppMode.Music
+            mode = AppMode.MUSIC
         else:
-            mode = AppMode.Standard
+            mode = AppMode.STANDARD
         self.app.model.app_mode = mode
         self._updateScanTypeList()
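The rewritten `addFolderTriggered()` above trades the native folder picker for Qt's built-in dialog when the user opts out of native dialogs: the stock picker returns a single directory, so the built-in dialog's internal list and tree views are switched to `MultiSelection` so that several folders can be added at once. The workaround in isolation (a sketch assuming PyQt5 is installed):

    # Standalone sketch of the multi-folder selection workaround above.
    from PyQt5.QtWidgets import (
        QAbstractItemView, QApplication, QFileDialog, QListView, QTreeView,
    )

    app = QApplication([])
    dialog = QFileDialog(None, "Pick folders")
    dialog.setFileMode(QFileDialog.DirectoryOnly)
    dialog.setOption(QFileDialog.DontUseNativeDialog, True)
    for view in (dialog.findChild(QListView, "listView"), dialog.findChild(QTreeView)):
        if view:  # flip both internal views to multi-selection
            view.setSelectionMode(QAbstractItemView.MultiSelection)
    if dialog.exec():
        print(dialog.selectedFiles())  # one entry per selected folder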
@@ -362,9 +360,7 @@ class DirectoriesDialog(QMainWindow):

     def scanTypeChanged(self, index):
         scan_options = self.app.model.SCANNER_CLASS.get_scan_options()
-        self.app.prefs.set_scan_type(
-            self.app.model.app_mode, scan_options[index].scan_type
-        )
+        self.app.prefs.set_scan_type(self.app.model.app_mode, scan_options[index].scan_type)
         self.app._update_options()

     def selectionChanged(self, selected, deselected):
@@ -6,8 +6,6 @@
 # which should be included with this package. The terms are also available at
 # http://www.gnu.org/licenses/gpl-3.0.html

-import urllib.parse
-
 from PyQt5.QtCore import pyqtSignal, Qt, QRect, QUrl, QModelIndex, QItemSelection
 from PyQt5.QtWidgets import (
     QComboBox,

@@ -44,9 +42,7 @@ class DirectoriesDelegate(QStyledItemDelegate):
         # On OS X (with Qt4.6.0), adding State_Enabled to the flags causes the whole drawing to
         # fail (draw nothing), but it's an OS X only glitch. On Windows, it works alright.
         cboption.state |= QStyle.State_Enabled
-        QApplication.style().drawComplexControl(
-            QStyle.CC_ComboBox, cboption, painter
-        )
+        QApplication.style().drawComplexControl(QStyle.CC_ComboBox, cboption, painter)
         painter.setBrush(option.palette.text())
         rect = QRect(option.rect)
         rect.setLeft(rect.left() + 4)

@@ -68,6 +64,8 @@ class DirectoriesDelegate(QStyledItemDelegate):


 class DirectoriesModel(TreeModel):
+    MIME_TYPE_FORMAT = "text/uri-list"
+
     def __init__(self, model, view, **kwargs):
         super().__init__(**kwargs)
         self.model = model

@@ -75,14 +73,12 @@ class DirectoriesModel(TreeModel):
         self.view = view
         self.view.setModel(self)

-        self.view.selectionModel().selectionChanged[
-            (QItemSelection, QItemSelection)
-        ].connect(self.selectionChanged)
+        self.view.selectionModel().selectionChanged[(QItemSelection, QItemSelection)].connect(self.selectionChanged)

-    def _createNode(self, ref, row):
+    def _create_node(self, ref, row):
         return RefNode(self, None, ref, row)

-    def _getChildren(self):
+    def _get_children(self):
         return list(self.model)

     def columnCount(self, parent=QModelIndex()):

@@ -108,14 +104,12 @@ class DirectoriesModel(TreeModel):
             return QBrush(Qt.red)
         return None

-    def dropMimeData(self, mimeData, action, row, column, parentIndex):
-        # the data in mimeData is urlencoded **in utf-8**!!! What we do is to decode, the mime data
-        # with 'ascii', which works since it's urlencoded. Then, we pass that to urllib.
-        if not mimeData.hasFormat("text/uri-list"):
+    def dropMimeData(self, mime_data, action, row, column, parent_index):
+        # the data in mimeData is urlencoded **in utf-8**
+        if not mime_data.hasFormat(self.MIME_TYPE_FORMAT):
             return False
-        data = bytes(mimeData.data("text/uri-list")).decode("ascii")
-        unquoted = urllib.parse.unquote(data)
-        urls = unquoted.split("\r\n")
+        data = bytes(mime_data.data(self.MIME_TYPE_FORMAT)).decode("ascii")
+        urls = data.split("\r\n")
         paths = [QUrl(url).toLocalFile() for url in urls if url]
         for path in paths:
             self.model.add_directory(path)
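Dropping `urllib.parse.unquote` above is safe because `QUrl.toLocalFile()` already percent-decodes `file:` URIs; the payload only needs to be split on CRLF, as the `text/uri-list` MIME type requires. A quick check (assumes PyQt5 is installed; the payload string is a hypothetical drop sample):

    # Sketch: QUrl.toLocalFile() percent-decodes, so no urllib is needed.
    from PyQt5.QtCore import QUrl

    data = "file:///home/user/My%20Pictures\r\nfile:///tmp/b\r\n"  # hypothetical payload
    paths = [QUrl(u).toLocalFile() for u in data.split("\r\n") if u]
    print(paths)  # ['/home/user/My Pictures', '/tmp/b']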
@@ -132,13 +126,12 @@ class DirectoriesModel(TreeModel):
         return result

     def headerData(self, section, orientation, role):
-        if orientation == Qt.Horizontal:
-            if role == Qt.DisplayRole and section < len(HEADERS):
-                return HEADERS[section]
+        if orientation == Qt.Horizontal and role == Qt.DisplayRole and section < len(HEADERS):
+            return HEADERS[section]
         return None

     def mimeTypes(self):
-        return ["text/uri-list"]
+        return [self.MIME_TYPE_FORMAT]

     def setData(self, index, value, role):
         if not index.isValid() or role != Qt.EditRole or index.column() != 1:

@@ -155,11 +148,8 @@ class DirectoriesModel(TreeModel):

     # --- Events
     def selectionChanged(self, selected, deselected):
-        newNodes = [
-            modelIndex.internalPointer().ref
-            for modelIndex in self.view.selectionModel().selectedRows()
-        ]
-        self.model.selected_nodes = newNodes
+        new_nodes = [modelIndex.internalPointer().ref for modelIndex in self.view.selectionModel().selectedRows()]
+        self.model.selected_nodes = new_nodes

     # --- Signals
     foldersAdded = pyqtSignal(list)
@@ -5,13 +5,22 @@
 import re
 from PyQt5.QtCore import Qt, pyqtSlot
 from PyQt5.QtWidgets import (
-    QPushButton, QLineEdit, QVBoxLayout, QGridLayout, QDialog,
-    QTableView, QAbstractItemView, QSpacerItem, QSizePolicy, QHeaderView
+    QPushButton,
+    QLineEdit,
+    QVBoxLayout,
+    QGridLayout,
+    QDialog,
+    QTableView,
+    QAbstractItemView,
+    QSpacerItem,
+    QSizePolicy,
+    QHeaderView,
 )
 from .exclude_list_table import ExcludeListTable

 from core.exclude import AlreadyThereException
 from hscommon.trans import trget

 tr = trget("ui")


@@ -51,9 +60,7 @@ class ExcludeListDialog(QDialog):
         self.testLine = QLineEdit()
         self.tableView = QTableView()
         triggers = (
-            QAbstractItemView.DoubleClicked
-            | QAbstractItemView.EditKeyPressed
-            | QAbstractItemView.SelectedClicked
+            QAbstractItemView.DoubleClicked | QAbstractItemView.EditKeyPressed | QAbstractItemView.SelectedClicked
         )
         self.tableView.setEditTriggers(triggers)
         self.tableView.setSelectionMode(QTableView.ExtendedSelection)

@@ -116,31 +123,32 @@ class ExcludeListDialog(QDialog):
         if not input_text:
             self.reset_input_style()
             return
-        # if at least one row matched, we know whether table is highlighted or not
+        # If at least one row matched, we know whether table is highlighted or not
        self._row_matched = self.model.test_string(input_text)
         self.table.refresh()

+        # Test the string currently in the input text box as well
         input_regex = self.inputLine.text()
         if not input_regex:
             self.reset_input_style()
             return
+        compiled = None
         try:
             compiled = re.compile(input_regex)
         except re.error:
             self.reset_input_style()
             return
-        match = compiled.match(input_text)
-        if match:
-            self._input_styled = True
+        if self.model.is_match(input_text, compiled):
             self.inputLine.setStyleSheet("background-color: rgb(10, 200, 10);")
+            self._input_styled = True
         else:
             self.reset_input_style()

     def reset_input_style(self):
         """Reset regex input line background"""
         if self._input_styled:
-            self._input_styled = False
             self.inputLine.setStyleSheet(self.styleSheet())
+            self._input_styled = False

     def reset_table_style(self):
         if self._row_matched:
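The validation flow above compiles the user's pattern once, treats `re.error` as "invalid input", and only styles the input line when the model reports a match. A reduced version of the same flow, using a plain `re` match in place of the model's `is_match()`:

    # Standalone sketch of the regex validation flow above.
    import re

    def is_valid_and_matches(pattern, text):
        try:
            compiled = re.compile(pattern)
        except re.error:
            return False  # invalid regex; the caller resets the highlight
        return compiled.match(text) is not None

    assert is_valid_and_matches("My.*", "My Pictures") is True
    assert is_valid_and_matches("[", "anything") is False  # invalid pattern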
@@ -149,7 +157,9 @@ class ExcludeListDialog(QDialog):
         self.table.refresh()

     def display_help_message(self):
-        self.app.show_message(tr("""\
+        self.app.show_message(
+            tr(
+                """\
 These (case sensitive) python regular expressions will filter out files during scans.<br>\
 Directores will also have their <strong>default state</strong> set to Excluded \
 in the Directories tab if their name happens to match one of the selected regular expressions.<br>\

@@ -162,4 +172,6 @@ You can test the regular expression with the "test string" button after pasting
 <code>C:\\\\User\\My Pictures\\test.png</code><br><br>
 Matching regular expressions will be highlighted.<br>\
 If there is at least one highlight, the path or filename tested will be ignored during scans.<br><br>\
-Directories and files starting with a period '.' are filtered out by default.<br><br>"""))
+Directories and files starting with a period '.' are filtered out by default.<br><br>"""
+            )
+        )
@@ -8,15 +8,14 @@ from PyQt5.QtGui import QFont, QFontMetrics, QIcon, QColor
 from qtlib.column import Column
 from qtlib.table import Table
 from hscommon.trans import trget

 tr = trget("ui")


 class ExcludeListTable(Table):
     """Model for exclude list"""
-    COLUMNS = [
-        Column("marked", defaultWidth=15),
-        Column("regex", defaultWidth=230)
-    ]
+    COLUMNS = [Column("marked", default_width=15), Column("regex", default_width=230)]

     def __init__(self, app, view, **kwargs):
         model = app.model.exclude_list_dialog.exclude_list_table  # pointer to GUITable

@@ -26,7 +25,6 @@ class ExcludeListTable(Table):
         view.setFont(font)
         fm = QFontMetrics(font)
         view.verticalHeader().setDefaultSectionSize(fm.height() + 2)
-        # app.willSavePrefs.connect(self.appWillSavePrefs)

     def _getData(self, row, column, role):
         if column.name == "marked":

@@ -44,8 +42,7 @@ class ExcludeListTable(Table):
         elif role == Qt.BackgroundRole and column.name == "regex":
             if row.highlight:
                 return QColor(10, 200, 10)  # green
-        elif role == Qt.EditRole:
-            if column.name == "regex":
-                return row.data[column.name]
+        elif role == Qt.EditRole and column.name == "regex":
+            return row.data[column.name]
         return None

@@ -63,15 +60,6 @@ class ExcludeListTable(Table):
         if column.name == "marked":
             row.marked = bool(value)
             return True
-        elif role == Qt.EditRole:
-            if column.name == "regex":
-                return self.model.rename_selected(value)
+        elif role == Qt.EditRole and column.name == "regex":
+            return self.model.rename_selected(value)
         return False

-    # def sort(self, column, order):
-    #     column = self.model.COLUMNS[column]
-    #     self.model.sort(column.name, order == Qt.AscendingOrder)
-
-    # # --- Events
-    # def appWillSavePrefs(self):
-    #     self.model.columns.save_columns()
@@ -16,7 +16,7 @@ from PyQt5.QtWidgets import (
 )

 from hscommon.trans import trget
-from qtlib.util import horizontalWrap
+from qtlib.util import horizontal_wrap
 from .ignore_list_table import IgnoreListTable

 tr = trget("ui")

@@ -56,9 +56,7 @@ class IgnoreListDialog(QDialog):
         self.clearButton = QPushButton(tr("Clear"))
         self.closeButton = QPushButton(tr("Close"))
         self.verticalLayout.addLayout(
-            horizontalWrap(
-                [self.removeSelectedButton, self.clearButton, None, self.closeButton]
-            )
+            horizontal_wrap([self.removeSelectedButton, self.clearButton, None, self.closeButton])
         )

     # --- model --> view
Some files were not shown because too many files have changed in this diff.