Mirror of https://github.com/arsenetar/dupeguru.git
hscommon.path --> pathlib

core tests pass

parent 160aaaf880
commit 740e2ab263
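Note: the pattern is the same in every file touched by this commit: hscommon.path idioms are swapped for pathlib-style calls. Below is a minimal sketch of that mapping written with stock pathlib for illustration only; the bracket indexing (p['name']) and the parent()/parents() calls that appear in the new code are not part of stock pathlib (which joins with / and exposes parent/parents as properties), so they presumably rely on helpers added alongside this change.

    from pathlib import Path

    def describe(p: Path):
        # the operations this commit swaps in, shown with stock pathlib
        return {
            'parent': p.parent,             # replaces path[:-1]
            'name': p.name,                 # replaces path[-1]
            'join': p / 'child',            # replaces path + 'child' (the diff writes path['child'])
            'components': p.parts,          # replaces tuple-style indexing / len(path)
            'children': list(p.glob('*')),  # replaces path.listdir()
            'is_dir': p.is_dir(),           # replaces path.isdir()
            'is_symlink': p.is_symlink(),   # replaces path.islink()
        }

    print(describe(Path('/tmp')))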
core/app.py (18 changed lines)
@@ -16,9 +16,9 @@ import shutil
 from send2trash import send2trash
 from jobprogress import job
+from pathlib import Path
 from hscommon.reg import RegistrableApplication
 from hscommon.notify import Broadcaster
-from hscommon.path import Path
 from hscommon.conflict import smart_move, smart_copy
 from hscommon.gui.progress_window import ProgressWindow
 from hscommon.util import (delete_if_empty, first, escape, nonone, format_time_decimal, allsame,
@@ -334,29 +334,29 @@ class DupeGuru(RegistrableApplication, Broadcaster):
     def clean_empty_dirs(self, path):
         if self.options['clean_empty_dirs']:
             while delete_if_empty(path, ['.DS_Store']):
-                path = path[:-1]
+                path = path.parent()

     def copy_or_move(self, dupe, copy: bool, destination: str, dest_type: DestType):
         source_path = dupe.path
         location_path = first(p for p in self.directories if dupe.path in p)
         dest_path = Path(destination)
         if dest_type in {DestType.Relative, DestType.Absolute}:
-            # no filename, no windows drive letter
-            source_base = source_path.remove_drive_letter()[:-1]
+            # no filename, no windows drive letter, no root
+            source_base = source_path.relative().parent()
             if dest_type == DestType.Relative:
-                source_base = source_base[location_path:]
-            dest_path = dest_path + source_base
+                source_base = source_base.relative_to(location_path.relative())
+            dest_path = dest_path[source_base]
         if not dest_path.exists():
-            dest_path.makedirs()
+            dest_path.mkdir(parents=True)
         # Add filename to dest_path. For file move/copy, it's not required, but for folders, yes.
-        dest_path = dest_path + source_path[-1]
+        dest_path = dest_path[source_path.name]
         logging.debug("Copy/Move operation from '%s' to '%s'", source_path, dest_path)
         # Raises an EnvironmentError if there's a problem
         if copy:
             smart_copy(source_path, dest_path)
         else:
             smart_move(source_path, dest_path)
-        self.clean_empty_dirs(source_path[:-1])
+        self.clean_empty_dirs(source_path.parent())

     def copy_or_move_marked(self, copy):
         def do(j):
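Note: the copy_or_move() hunk above carries the densest path arithmetic of the commit. A rough re-statement of the destination computation, using stock pathlib and hypothetical names (the DestType member names come from the diff, the numeric values are assumed; 0 is what the tests further down pass for the plain "copy straight into destination" mode):

    from enum import IntEnum
    from pathlib import PurePosixPath

    class DestType(IntEnum):   # names from the diff; numeric values are an assumption for this sketch
        Direct = 0
        Relative = 1
        Absolute = 2

    def build_dest_path(source_path, location_path, destination, dest_type):
        # mirrors copy_or_move(): optionally re-create the source's folder structure
        # under the destination, then append the file name (assumes absolute POSIX paths)
        dest_path = PurePosixPath(destination)
        if dest_type in (DestType.Relative, DestType.Absolute):
            if dest_type == DestType.Relative:
                # keep only the part of the source folder below its registered location
                base = source_path.parent.relative_to(location_path)
            else:
                # keep the whole source folder, minus the root ("no root")
                base = PurePosixPath(*source_path.parent.parts[1:])
            dest_path = dest_path / base
        return dest_path / source_path.name

    src = PurePosixPath('/music/albums/a/track.mp3')
    print(build_dest_path(src, PurePosixPath('/music'), '/dest', DestType.Relative))  # /dest/albums/a/track.mp3
    print(build_dest_path(src, PurePosixPath('/music'), '/dest', DestType.Direct))    # /dest/track.mp3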
core/directories.py

@@ -8,9 +8,9 @@
 from xml.etree import ElementTree as ET
 import logging
+from pathlib import Path

 from jobprogress import job
-from hscommon.path import Path
 from hscommon.util import FileOrPath

 from . import fs
@@ -35,7 +35,7 @@ class Directories:

     def __contains__(self, path):
         for p in self._dirs:
-            if path in p:
+            if path == p or p in path.parents():
                 return True
         return False

@@ -51,7 +51,7 @@ class Directories:
     #---Private
     def _default_state_for_path(self, path):
         # Override this in subclasses to specify the state of some special folders.
-        if path[-1].startswith('.'): # hidden
+        if path.name.startswith('.'): # hidden
             return DirectoryState.Excluded

     def _get_files(self, from_path, j):
@@ -61,7 +61,7 @@ class Directories:
         # Recursively get files from folders with lots of subfolder is expensive. However, there
         # might be a subfolder in this path that is not excluded. What we want to do is to skim
        # through self.states and see if we must continue, or we can stop right here to save time
-        if not any(p[:len(from_path)] == from_path for p in self.states):
+        if not any(p.parts[:len(from_path.parts)] == from_path.parts for p in self.states):
             return
         try:
             filepaths = set()
@@ -72,9 +72,8 @@ class Directories:
                 file.is_ref = state == DirectoryState.Reference
                 filepaths.add(file.path)
                 yield file
-            subpaths = [from_path + name for name in from_path.listdir()]
             # it's possible that a folder (bundle) gets into the file list. in that case, we don't want to recurse into it
-            subfolders = [p for p in subpaths if not p.islink() and p.isdir() and p not in filepaths]
+            subfolders = [p for p in from_path.glob('*') if not p.is_symlink() and p.is_dir() and p not in filepaths]
             for subfolder in subfolders:
                 for file in self._get_files(subfolder, j):
                     yield file
@@ -114,9 +113,9 @@ class Directories:
     def get_subfolders(path):
         """returns a sorted list of paths corresponding to subfolders in `path`"""
         try:
-            names = [name for name in path.listdir() if (path + name).isdir()]
+            names = [p.name for p in path.glob('*') if p.is_dir()]
             names.sort(key=lambda x:x.lower())
-            return [path + name for name in names]
+            return [path[name] for name in names]
         except EnvironmentError:
             return []

@@ -147,7 +146,7 @@ class Directories:
         default_state = self._default_state_for_path(path)
         if default_state is not None:
             return default_state
-        parent = path[:-1]
+        parent = path.parent()
         if parent in self:
             return self.get_state(parent)
         else:
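Note: both __contains__ and get_state now walk pathlib ancestry instead of relying on the old tuple-prefix meaning of "path in p". A stock-pathlib illustration of the new containment test (stock pathlib exposes parents as a property rather than the parents() call seen in the diff):

    from pathlib import PurePosixPath

    def directory_contains(root, path):
        # new semantics: a path is "in" a registered root if it is the root itself
        # or the root is one of its ancestors
        return path == root or root in path.parents

    assert directory_contains(PurePosixPath('/data'), PurePosixPath('/data/sub/file.txt'))
    assert directory_contains(PurePosixPath('/data'), PurePosixPath('/data'))
    assert not directory_contains(PurePosixPath('/data/sub'), PurePosixPath('/data'))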
core/fs.py (26 changed lines)
@@ -129,14 +129,14 @@ class File:
     #--- Public
     @classmethod
     def can_handle(cls, path):
-        return not path.islink() and path.isfile()
+        return not path.is_symlink() and path.is_file()

     def rename(self, newname):
         if newname == self.name:
             return
-        destpath = self.path[:-1] + newname
+        destpath = self.path.parent()[newname]
         if destpath.exists():
-            raise AlreadyExistsError(newname, self.path[:-1])
+            raise AlreadyExistsError(newname, self.path.parent())
         try:
             self.path.rename(destpath)
         except EnvironmentError:
@@ -157,11 +157,11 @@ class File:

     @property
     def name(self):
-        return self.path[-1]
+        return self.path.name

     @property
     def folder_path(self):
-        return self.path[:-1]
+        return self.path.parent()


 class Folder(File):
@@ -203,14 +203,13 @@ class Folder(File):
     @property
     def subfolders(self):
         if self._subfolders is None:
-            subpaths = [self.path + name for name in self.path.listdir()]
-            subfolders = [p for p in subpaths if not p.islink() and p.isdir()]
+            subfolders = [p for p in self.path.glob('*') if not p.is_symlink() and p.is_dir()]
             self._subfolders = [Folder(p) for p in subfolders]
         return self._subfolders

     @classmethod
     def can_handle(cls, path):
-        return not path.islink() and path.isdir()
+        return not path.is_symlink() and path.is_dir()


 def get_file(path, fileclasses=[File]):
@@ -220,18 +219,9 @@ def get_file(path, fileclasses=[File]):

 def get_files(path, fileclasses=[File]):
     assert all(issubclass(fileclass, File) for fileclass in fileclasses)
-    def combine_paths(p1, p2):
-        try:
-            return p1 + p2
-        except Exception:
-            # This is temporary debug logging for #84.
-            logging.warning("Failed to combine %r and %r.", p1, p2)
-            raise
-
     try:
-        paths = [combine_paths(path, name) for name in path.listdir()]
         result = []
-        for path in paths:
+        for path in path.glob('*'):
             file = get_file(path, fileclasses=fileclasses)
             if file is not None:
                 result.append(file)
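Note: for File.rename() the only real change is how the sibling path is built. A stock-pathlib sketch of the same logic (AlreadyExistsError is the project's own exception; a built-in is used here so the snippet stays self-contained):

    from pathlib import Path

    def rename_to_sibling(path: Path, newname: str) -> Path:
        # build the destination next to the original, refuse to overwrite, then rename
        destpath = path.parent / newname
        if destpath.exists():
            raise FileExistsError(str(destpath))
        path.rename(destpath)
        return destpath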
core/gui/directory_tree.py

@@ -31,7 +31,7 @@ class DirectoryNode(Node):
         self.clear()
         subpaths = self._tree.app.directories.get_subfolders(self._directory_path)
         for path in subpaths:
-            self.append(DirectoryNode(self._tree, path, path[-1]))
+            self.append(DirectoryNode(self._tree, path, path.name))
         self._loaded = True

     def update_all_states(self):
core/prioritize.py

@@ -79,7 +79,7 @@ class FolderCategory(ValueListCategory):

     def sort_key(self, dupe, crit_value):
         value = self.extract_value(dupe)
-        if value[:len(crit_value)] == crit_value:
+        if crit_value == value or crit_value in value.parents():
             return 0
         else:
             return 1
core/scanner.py

@@ -117,7 +117,7 @@ class Scanner:
             return False
         if is_same_with_digit(refname, dupename):
             return True
-        return len(dupe.path) > len(ref.path)
+        return len(dupe.path.parts) > len(ref.path.parts)

     def get_dupe_groups(self, files, j=job.nulljob):
         j = j.start_subjob([8, 2])
@@ -140,7 +140,7 @@ class Scanner:
         toremove = set()
         last_parent_path = sortedpaths[0]
         for p in sortedpaths[1:]:
-            if p in last_parent_path:
+            if last_parent_path in p.parents():
                 toremove.add(p)
             else:
                 last_parent_path = p
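Note: the old Path type was used like a tuple (len(path), slicing), which pathlib.Path does not support, so depth comparisons and "is this under the previous parent?" checks are rewritten against .parts and .parents. A small stock-pathlib illustration of both scanner checks:

    from pathlib import PurePosixPath

    def is_deeper(dupe_path, ref_path):
        # tie-breaker: more path components means the dupe sits deeper than the reference
        return len(dupe_path.parts) > len(ref_path.parts)

    def drop_nested(sorted_paths):
        # keep only paths that are not under a previously kept path (input must be sorted)
        kept, last = [], None
        for p in sorted_paths:
            if last is not None and last in p.parents:
                continue
            kept.append(p)
            last = p
        return kept

    paths = sorted(PurePosixPath(s) for s in ('/a', '/a/b', '/a/b/c', '/z'))
    print(drop_nested(paths))             # [PurePosixPath('/a'), PurePosixPath('/z')]
    print(is_deeper(paths[1], paths[0]))  # True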
core/tests/app_test.py

@@ -9,10 +9,9 @@
 import os
 import os.path as op
 import logging
+from pathlib import Path

 from pytest import mark
-from hscommon import io
-from hscommon.path import Path
 import hscommon.conflict
 import hscommon.util
 from hscommon.testutil import CallLogger, eq_, log_calls
@@ -57,7 +56,7 @@ class TestCaseDupeGuru:
         # for this unit is pathetic. What's done is done. My approach now is to add tests for
         # every change I want to make. The blowup was caused by a missing import.
         p = Path(str(tmpdir))
-        io.open(p + 'foo', 'w').close()
+        p['foo'].touch()
         monkeypatch.setattr(hscommon.conflict, 'smart_copy', log_calls(lambda source_path, dest_path: None))
         # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
         monkeypatch.setattr(app, 'smart_copy', hscommon.conflict.smart_copy)
@@ -68,19 +67,19 @@ class TestCaseDupeGuru:
         dgapp.copy_or_move(f, True, 'some_destination', 0)
         eq_(1, len(hscommon.conflict.smart_copy.calls))
         call = hscommon.conflict.smart_copy.calls[0]
-        eq_(call['dest_path'], op.join('some_destination', 'foo'))
+        eq_(call['dest_path'], Path('some_destination', 'foo'))
         eq_(call['source_path'], f.path)

     def test_copy_or_move_clean_empty_dirs(self, tmpdir, monkeypatch):
         tmppath = Path(str(tmpdir))
-        sourcepath = tmppath + 'source'
-        io.mkdir(sourcepath)
-        io.open(sourcepath + 'myfile', 'w')
+        sourcepath = tmppath['source']
+        sourcepath.mkdir()
+        sourcepath['myfile'].touch()
         app = TestApp().app
         app.directories.add_path(tmppath)
         [myfile] = app.directories.get_files()
         monkeypatch.setattr(app, 'clean_empty_dirs', log_calls(lambda path: None))
-        app.copy_or_move(myfile, False, tmppath + 'dest', 0)
+        app.copy_or_move(myfile, False, tmppath['dest'], 0)
         calls = app.clean_empty_dirs.calls
         eq_(1, len(calls))
         eq_(sourcepath, calls[0]['path'])
@@ -104,8 +103,8 @@ class TestCaseDupeGuru:
         # If the ignore_hardlink_matches option is set, don't match files hardlinking to the same
         # inode.
         tmppath = Path(str(tmpdir))
-        io.open(tmppath + 'myfile', 'w').write('foo')
-        os.link(str(tmppath + 'myfile'), str(tmppath + 'hardlink'))
+        tmppath['myfile'].open('wt').write('foo')
+        os.link(str(tmppath['myfile']), str(tmppath['hardlink']))
         app = TestApp().app
         app.directories.add_path(tmppath)
         app.scanner.scan_type = ScanType.Contents
@@ -145,7 +144,7 @@ class TestCaseDupeGuru_clean_empty_dirs:
         # delete_if_empty must be recursively called up in the path until it returns False
         @log_calls
         def mock_delete_if_empty(path, files_to_delete=[]):
-            return len(path) > 1
+            return len(path.parts) > 1

         monkeypatch.setattr(hscommon.util, 'delete_if_empty', mock_delete_if_empty)
         # XXX This monkeypatch is temporary. will be fixed in a better monkeypatcher.
@@ -171,8 +170,8 @@ class TestCaseDupeGuruWithResults:
         self.rtable.refresh()
         tmpdir = request.getfuncargvalue('tmpdir')
         tmppath = Path(str(tmpdir))
-        io.mkdir(tmppath + 'foo')
-        io.mkdir(tmppath + 'bar')
+        tmppath['foo'].mkdir()
+        tmppath['bar'].mkdir()
         self.app.directories.add_path(tmppath)

     def test_GetObjects(self, do_setup):
@@ -404,12 +403,9 @@ class TestCaseDupeGuru_renameSelected:
     def pytest_funcarg__do_setup(self, request):
         tmpdir = request.getfuncargvalue('tmpdir')
         p = Path(str(tmpdir))
-        fp = open(str(p + 'foo bar 1'),mode='w')
-        fp.close()
-        fp = open(str(p + 'foo bar 2'),mode='w')
-        fp.close()
-        fp = open(str(p + 'foo bar 3'),mode='w')
-        fp.close()
+        p['foo bar 1'].touch()
+        p['foo bar 2'].touch()
+        p['foo bar 3'].touch()
         files = fs.get_files(p)
         for f in files:
             f.is_ref = False
@@ -431,7 +427,7 @@ class TestCaseDupeGuru_renameSelected:
         g = self.groups[0]
         self.rtable.select([1])
         assert app.rename_selected('renamed')
-        names = io.listdir(self.p)
+        names = [p.name for p in self.p.glob('*')]
         assert 'renamed' in names
         assert 'foo bar 2' not in names
         eq_(g.dupes[0].name, 'renamed')
@@ -444,7 +440,7 @@ class TestCaseDupeGuru_renameSelected:
         assert not app.rename_selected('renamed')
         msg = logging.warning.calls[0]['msg']
         eq_('dupeGuru Warning: list index out of range', msg)
-        names = io.listdir(self.p)
+        names = [p.name for p in self.p.glob('*')]
         assert 'renamed' not in names
         assert 'foo bar 2' in names
         eq_(g.dupes[0].name, 'foo bar 2')
@@ -457,7 +453,7 @@ class TestCaseDupeGuru_renameSelected:
         assert not app.rename_selected('foo bar 1')
         msg = logging.warning.calls[0]['msg']
         assert msg.startswith('dupeGuru Warning: \'foo bar 1\' already exists in')
-        names = io.listdir(self.p)
+        names = [p.name for p in self.p.glob('*')]
         assert 'foo bar 1' in names
         assert 'foo bar 2' in names
         eq_(g.dupes[0].name, 'foo bar 2')
@@ -467,9 +463,9 @@ class TestAppWithDirectoriesInTree:
     def pytest_funcarg__do_setup(self, request):
         tmpdir = request.getfuncargvalue('tmpdir')
         p = Path(str(tmpdir))
-        io.mkdir(p + 'sub1')
-        io.mkdir(p + 'sub2')
-        io.mkdir(p + 'sub3')
+        p['sub1'].mkdir()
+        p['sub2'].mkdir()
+        p['sub3'].mkdir()
         app = TestApp()
         self.app = app.app
         self.dtree = app.dtree
core/tests/base.py

@@ -6,8 +6,9 @@
 # which should be included with this package. The terms are also available at
 # http://www.hardcoded.net/licenses/bsd_license

+from pathlib import Path

 from hscommon.testutil import TestApp as TestAppBase, eq_, with_app
-from hscommon.path import Path
 from hscommon.util import get_file_ext, format_size
 from hscommon.gui.column import Column
 from jobprogress.job import nulljob, JobCancelled
@@ -100,11 +101,11 @@ class NamedObject:

     @property
     def path(self):
-        return self._folder + self.name
+        return self._folder[self.name]

     @property
     def folder_path(self):
-        return self.path[:-1]
+        return self.path.parent()

     @property
     def extension(self):
core/tests/directories_test.py

@@ -10,39 +10,32 @@ import os
 import time
 import tempfile
 import shutil
+from pathlib import Path

 from pytest import raises
-from hscommon import io
-from hscommon.path import Path
 from hscommon.testutil import eq_

 from ..directories import *

 def create_fake_fs(rootpath):
     # We have it as a separate function because other units are using it.
-    rootpath = rootpath + 'fs'
-    io.mkdir(rootpath)
-    io.mkdir(rootpath + 'dir1')
-    io.mkdir(rootpath + 'dir2')
-    io.mkdir(rootpath + 'dir3')
-    fp = io.open(rootpath + 'file1.test', 'w')
-    fp.write('1')
-    fp.close()
-    fp = io.open(rootpath + 'file2.test', 'w')
-    fp.write('12')
-    fp.close()
-    fp = io.open(rootpath + 'file3.test', 'w')
-    fp.write('123')
-    fp.close()
-    fp = io.open(rootpath + ('dir1', 'file1.test'), 'w')
-    fp.write('1')
-    fp.close()
-    fp = io.open(rootpath + ('dir2', 'file2.test'), 'w')
-    fp.write('12')
-    fp.close()
-    fp = io.open(rootpath + ('dir3', 'file3.test'), 'w')
-    fp.write('123')
-    fp.close()
+    rootpath = rootpath['fs']
+    rootpath.mkdir()
+    rootpath['dir1'].mkdir()
+    rootpath['dir2'].mkdir()
+    rootpath['dir3'].mkdir()
+    with rootpath['file1.test'].open('wt') as fp:
+        fp.write('1')
+    with rootpath['file2.test'].open('wt') as fp:
+        fp.write('12')
+    with rootpath['file3.test'].open('wt') as fp:
+        fp.write('123')
+    with rootpath['dir1/file1.test'].open('wt') as fp:
+        fp.write('1')
+    with rootpath['dir2/file2.test'].open('wt') as fp:
+        fp.write('12')
+    with rootpath['dir3/file3.test'].open('wt') as fp:
+        fp.write('123')
     return rootpath

 def setup_module(module):
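Note: for comparison, the same fixture written against stock pathlib only (illustrative; the project code above uses the bracket-indexing helpers):

    from pathlib import Path

    def create_fake_fs_plain(root: Path) -> Path:
        # three files at the top level plus one file in each of three subdirectories
        root = root / 'fs'
        root.mkdir()
        for name in ('dir1', 'dir2', 'dir3'):
            (root / name).mkdir()
        for name, data in (('file1.test', '1'), ('file2.test', '12'), ('file3.test', '123')):
            (root / name).write_text(data)
        (root / 'dir1' / 'file1.test').write_text('1')
        (root / 'dir2' / 'file2.test').write_text('12')
        (root / 'dir3' / 'file3.test').write_text('123')
        return root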
@@ -50,11 +43,10 @@ def setup_module(module):
     # and another with a more complex structure.
     testpath = Path(tempfile.mkdtemp())
     module.testpath = testpath
-    rootpath = testpath + 'onefile'
-    io.mkdir(rootpath)
-    fp = io.open(rootpath + 'test.txt', 'w')
-    fp.write('test_data')
-    fp.close()
+    rootpath = testpath['onefile']
+    rootpath.mkdir()
+    with rootpath['test.txt'].open('wt') as fp:
+        fp.write('test_data')
     create_fake_fs(testpath)

 def teardown_module(module):
@@ -67,30 +59,30 @@ def test_empty():

 def test_add_path():
     d = Directories()
-    p = testpath + 'onefile'
+    p = testpath['onefile']
     d.add_path(p)
     eq_(1,len(d))
     assert p in d
-    assert (p + 'foobar') in d
-    assert p[:-1] not in d
-    p = testpath + 'fs'
+    assert p['foobar'] in d
+    assert p.parent() not in d
+    p = testpath['fs']
     d.add_path(p)
     eq_(2,len(d))
     assert p in d

 def test_AddPath_when_path_is_already_there():
     d = Directories()
-    p = testpath + 'onefile'
+    p = testpath['onefile']
     d.add_path(p)
     with raises(AlreadyThereError):
         d.add_path(p)
     with raises(AlreadyThereError):
-        d.add_path(p + 'foobar')
+        d.add_path(p['foobar'])
     eq_(1, len(d))

 def test_add_path_containing_paths_already_there():
     d = Directories()
-    d.add_path(testpath + 'onefile')
+    d.add_path(testpath['onefile'])
     eq_(1, len(d))
     d.add_path(testpath)
     eq_(len(d), 1)
@@ -98,7 +90,7 @@ def test_add_path_containing_paths_already_there():

 def test_AddPath_non_latin(tmpdir):
     p = Path(str(tmpdir))
-    to_add = p + 'unicode\u201a'
+    to_add = p['unicode\u201a']
     os.mkdir(str(to_add))
     d = Directories()
     try:
@@ -108,24 +100,24 @@ def test_AddPath_non_latin(tmpdir):

 def test_del():
     d = Directories()
-    d.add_path(testpath + 'onefile')
+    d.add_path(testpath['onefile'])
     try:
         del d[1]
         assert False
     except IndexError:
         pass
-    d.add_path(testpath + 'fs')
+    d.add_path(testpath['fs'])
     del d[1]
     eq_(1, len(d))

 def test_states():
     d = Directories()
-    p = testpath + 'onefile'
+    p = testpath['onefile']
     d.add_path(p)
     eq_(DirectoryState.Normal ,d.get_state(p))
     d.set_state(p, DirectoryState.Reference)
     eq_(DirectoryState.Reference ,d.get_state(p))
-    eq_(DirectoryState.Reference ,d.get_state(p + 'dir1'))
+    eq_(DirectoryState.Reference ,d.get_state(p['dir1']))
     eq_(1,len(d.states))
     eq_(p,list(d.states.keys())[0])
     eq_(DirectoryState.Reference ,d.states[p])
@@ -133,67 +125,67 @@ def test_states():
 def test_get_state_with_path_not_there():
     # When the path's not there, just return DirectoryState.Normal
     d = Directories()
-    d.add_path(testpath + 'onefile')
+    d.add_path(testpath['onefile'])
     eq_(d.get_state(testpath), DirectoryState.Normal)

 def test_states_remain_when_larger_directory_eat_smaller_ones():
     d = Directories()
-    p = testpath + 'onefile'
+    p = testpath['onefile']
     d.add_path(p)
     d.set_state(p, DirectoryState.Excluded)
     d.add_path(testpath)
     d.set_state(testpath, DirectoryState.Reference)
-    eq_(DirectoryState.Excluded ,d.get_state(p))
-    eq_(DirectoryState.Excluded ,d.get_state(p + 'dir1'))
-    eq_(DirectoryState.Reference ,d.get_state(testpath))
+    eq_(DirectoryState.Excluded, d.get_state(p))
+    eq_(DirectoryState.Excluded, d.get_state(p['dir1']))
+    eq_(DirectoryState.Reference, d.get_state(testpath))

 def test_set_state_keep_state_dict_size_to_minimum():
     d = Directories()
-    p = testpath + 'fs'
+    p = testpath['fs']
     d.add_path(p)
     d.set_state(p, DirectoryState.Reference)
-    d.set_state(p + 'dir1', DirectoryState.Reference)
+    d.set_state(p['dir1'], DirectoryState.Reference)
     eq_(1,len(d.states))
-    eq_(DirectoryState.Reference ,d.get_state(p + 'dir1'))
-    d.set_state(p + 'dir1', DirectoryState.Normal)
+    eq_(DirectoryState.Reference ,d.get_state(p['dir1']))
+    d.set_state(p['dir1'], DirectoryState.Normal)
     eq_(2,len(d.states))
-    eq_(DirectoryState.Normal ,d.get_state(p + 'dir1'))
-    d.set_state(p + 'dir1', DirectoryState.Reference)
+    eq_(DirectoryState.Normal ,d.get_state(p['dir1']))
+    d.set_state(p['dir1'], DirectoryState.Reference)
     eq_(1,len(d.states))
-    eq_(DirectoryState.Reference ,d.get_state(p + 'dir1'))
+    eq_(DirectoryState.Reference ,d.get_state(p['dir1']))

 def test_get_files():
     d = Directories()
-    p = testpath + 'fs'
+    p = testpath['fs']
     d.add_path(p)
-    d.set_state(p + 'dir1', DirectoryState.Reference)
-    d.set_state(p + 'dir2', DirectoryState.Excluded)
+    d.set_state(p['dir1'], DirectoryState.Reference)
+    d.set_state(p['dir2'], DirectoryState.Excluded)
     files = list(d.get_files())
     eq_(5, len(files))
     for f in files:
-        if f.path[:-1] == p + 'dir1':
+        if f.path.parent() == p['dir1']:
             assert f.is_ref
         else:
             assert not f.is_ref

 def test_get_folders():
     d = Directories()
-    p = testpath + 'fs'
+    p = testpath['fs']
     d.add_path(p)
-    d.set_state(p + 'dir1', DirectoryState.Reference)
-    d.set_state(p + 'dir2', DirectoryState.Excluded)
+    d.set_state(p['dir1'], DirectoryState.Reference)
+    d.set_state(p['dir2'], DirectoryState.Excluded)
     folders = list(d.get_folders())
     eq_(len(folders), 3)
     ref = [f for f in folders if f.is_ref]
     not_ref = [f for f in folders if not f.is_ref]
     eq_(len(ref), 1)
-    eq_(ref[0].path, p + 'dir1')
+    eq_(ref[0].path, p['dir1'])
     eq_(len(not_ref), 2)
     eq_(ref[0].size, 1)

 def test_get_files_with_inherited_exclusion():
     d = Directories()
-    p = testpath + 'onefile'
+    p = testpath['onefile']
     d.add_path(p)
     d.set_state(p, DirectoryState.Excluded)
     eq_([], list(d.get_files()))
@@ -202,19 +194,19 @@ def test_save_and_load(tmpdir):
     d1 = Directories()
     d2 = Directories()
     p1 = Path(str(tmpdir.join('p1')))
-    io.mkdir(p1)
+    p1.mkdir()
     p2 = Path(str(tmpdir.join('p2')))
-    io.mkdir(p2)
+    p2.mkdir()
     d1.add_path(p1)
     d1.add_path(p2)
     d1.set_state(p1, DirectoryState.Reference)
-    d1.set_state(p1 + 'dir1', DirectoryState.Excluded)
+    d1.set_state(p1['dir1'], DirectoryState.Excluded)
     tmpxml = str(tmpdir.join('directories_testunit.xml'))
     d1.save_to_file(tmpxml)
     d2.load_from_file(tmpxml)
     eq_(2, len(d2))
     eq_(DirectoryState.Reference ,d2.get_state(p1))
-    eq_(DirectoryState.Excluded ,d2.get_state(p1 + 'dir1'))
+    eq_(DirectoryState.Excluded ,d2.get_state(p1['dir1']))

 def test_invalid_path():
     d = Directories()
@@ -234,12 +226,12 @@ def test_load_from_file_with_invalid_path(tmpdir):
     #This test simulates a load from file resulting in a
     #InvalidPath raise. Other directories must be loaded.
     d1 = Directories()
-    d1.add_path(testpath + 'onefile')
+    d1.add_path(testpath['onefile'])
     #Will raise InvalidPath upon loading
     p = Path(str(tmpdir.join('toremove')))
-    io.mkdir(p)
+    p.mkdir()
     d1.add_path(p)
-    io.rmdir(p)
+    p.rmdir()
     tmpxml = str(tmpdir.join('directories_testunit.xml'))
     d1.save_to_file(tmpxml)
     d2 = Directories()
@@ -248,11 +240,11 @@ def test_load_from_file_with_invalid_path(tmpdir):

 def test_unicode_save(tmpdir):
     d = Directories()
-    p1 = Path(str(tmpdir)) + 'hello\xe9'
-    io.mkdir(p1)
-    io.mkdir(p1 + 'foo\xe9')
+    p1 = Path(str(tmpdir))['hello\xe9']
+    p1.mkdir()
+    p1['foo\xe9'].mkdir()
     d.add_path(p1)
-    d.set_state(p1 + 'foo\xe9', DirectoryState.Excluded)
+    d.set_state(p1['foo\xe9'], DirectoryState.Excluded)
     tmpxml = str(tmpdir.join('directories_testunit.xml'))
     try:
         d.save_to_file(tmpxml)
@@ -261,12 +253,12 @@ def test_unicode_save(tmpdir):

 def test_get_files_refreshes_its_directories():
     d = Directories()
-    p = testpath + 'fs'
+    p = testpath['fs']
     d.add_path(p)
     files = d.get_files()
     eq_(6, len(list(files)))
     time.sleep(1)
-    os.remove(str(p + ('dir1','file1.test')))
+    os.remove(str(p['dir1']['file1.test']))
     files = d.get_files()
     eq_(5, len(list(files)))

@@ -274,14 +266,14 @@ def test_get_files_does_not_choke_on_non_existing_directories(tmpdir):
     d = Directories()
     p = Path(str(tmpdir))
     d.add_path(p)
-    io.rmtree(p)
+    shutil.rmtree(str(p))
     eq_([], list(d.get_files()))

 def test_get_state_returns_excluded_by_default_for_hidden_directories(tmpdir):
     d = Directories()
     p = Path(str(tmpdir))
-    hidden_dir_path = p + '.foo'
-    io.mkdir(p + '.foo')
+    hidden_dir_path = p['.foo']
+    p['.foo'].mkdir()
     d.add_path(p)
     eq_(d.get_state(hidden_dir_path), DirectoryState.Excluded)
     # But it can be overriden
@@ -292,21 +284,21 @@ def test_default_path_state_override(tmpdir):
     # It's possible for a subclass to override the default state of a path
     class MyDirectories(Directories):
         def _default_state_for_path(self, path):
-            if 'foobar' in path:
+            if 'foobar' in path.parts:
                 return DirectoryState.Excluded

     d = MyDirectories()
     p1 = Path(str(tmpdir))
-    io.mkdir(p1 + 'foobar')
-    io.open(p1 + 'foobar/somefile', 'w').close()
-    io.mkdir(p1 + 'foobaz')
-    io.open(p1 + 'foobaz/somefile', 'w').close()
+    p1['foobar'].mkdir()
+    p1['foobar']['somefile'].touch()
+    p1['foobaz'].mkdir()
+    p1['foobaz']['somefile'].touch()
     d.add_path(p1)
-    eq_(d.get_state(p1 + 'foobaz'), DirectoryState.Normal)
-    eq_(d.get_state(p1 + 'foobar'), DirectoryState.Excluded)
+    eq_(d.get_state(p1['foobaz']), DirectoryState.Normal)
+    eq_(d.get_state(p1['foobar']), DirectoryState.Excluded)
     eq_(len(list(d.get_files())), 1) # only the 'foobaz' file is there
     # However, the default state can be changed
-    d.set_state(p1 + 'foobar', DirectoryState.Normal)
-    eq_(d.get_state(p1 + 'foobar'), DirectoryState.Normal)
+    d.set_state(p1['foobar'], DirectoryState.Normal)
+    eq_(d.get_state(p1['foobar']), DirectoryState.Normal)
     eq_(len(list(d.get_files())), 2)

core/tests/fs_test.py

@@ -7,8 +7,8 @@
 # http://www.hardcoded.net/licenses/bsd_license

 import hashlib
+from pathlib import Path

-from hscommon.path import Path
 from hscommon.testutil import eq_
 from core.tests.directories_test import create_fake_fs

@@ -25,12 +25,12 @@ def test_md5_aggregate_subfiles_sorted(tmpdir):
     #same order everytime.
     p = create_fake_fs(Path(str(tmpdir)))
     b = fs.Folder(p)
-    md51 = fs.File(p + ('dir1', 'file1.test')).md5
-    md52 = fs.File(p + ('dir2', 'file2.test')).md5
-    md53 = fs.File(p + ('dir3', 'file3.test')).md5
-    md54 = fs.File(p + 'file1.test').md5
-    md55 = fs.File(p + 'file2.test').md5
-    md56 = fs.File(p + 'file3.test').md5
+    md51 = fs.File(p['dir1']['file1.test']).md5
+    md52 = fs.File(p['dir2']['file2.test']).md5
+    md53 = fs.File(p['dir3']['file3.test']).md5
+    md54 = fs.File(p['file1.test']).md5
+    md55 = fs.File(p['file2.test']).md5
+    md56 = fs.File(p['file3.test']).md5
     # The expected md5 is the md5 of md5s for folders and the direct md5 for files
     folder_md51 = hashlib.md5(md51).digest()
     folder_md52 = hashlib.md5(md52).digest()
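Note: the expectation the test builds (folder_md51 = hashlib.md5(md51).digest()) amounts to "a folder's digest is the md5 of its children's digests". A minimal sketch of that aggregation, assuming children are fed in a stable sorted order as the test name suggests:

    import hashlib

    def folder_digest(child_digests):
        # hash the concatenation of the children's digests; files contribute their content digest directly
        md5 = hashlib.md5()
        for digest in child_digests:
            md5.update(digest)
        return md5.digest()

    file_digest = hashlib.md5(b'1').digest()
    print(folder_digest([file_digest]).hex())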
core/tests/results_test.py

@@ -429,7 +429,7 @@ class TestCaseResultsXML:
         self.results.groups = self.groups

     def get_file(self, path): # use this as a callback for load_from_xml
-        return [o for o in self.objects if o.path == path][0]
+        return [o for o in self.objects if str(o.path) == path][0]

     def test_save_to_xml(self):
         self.objects[0].is_ref = True
core/tests/scanner_test.py

@@ -7,8 +7,7 @@
 # http://www.hardcoded.net/licenses/bsd_license

 from jobprogress import job
-from hscommon import io
-from hscommon.path import Path
+from pathlib import Path
 from hscommon.testutil import eq_

 from .. import fs
@@ -21,7 +20,7 @@ class NamedObject:
         if path is None:
             path = Path(name)
         else:
-            path = Path(path) + name
+            path = Path(path, name)
         self.name = name
         self.size = size
         self.path = path
@@ -37,7 +36,6 @@ def pytest_funcarg__fake_fileexists(request):
     # This is a hack to avoid invalidating all previous tests since the scanner started to test
     # for file existence before doing the match grouping.
     monkeypatch = request.getfuncargvalue('monkeypatch')
-    monkeypatch.setattr(io, 'exists', lambda _: True)
     monkeypatch.setattr(Path, 'exists', lambda _: True)

 def test_empty(fake_fileexists):
@@ -471,11 +469,11 @@ def test_dont_group_files_that_dont_exist(tmpdir):
     s = Scanner()
     s.scan_type = ScanType.Contents
     p = Path(str(tmpdir))
-    io.open(p + 'file1', 'w').write('foo')
-    io.open(p + 'file2', 'w').write('foo')
+    p['file1'].touch()
+    p['file2'].touch()
     file1, file2 = fs.get_files(p)
     def getmatches(*args, **kw):
-        io.remove(file2.path)
+        file2.path.unlink()
         return [Match(file1, file2, 100)]
     s._getmatches = getmatches
