Compare commits

...

5 Commits

Author SHA1 Message Date
Andrew Senetar a37b5b0eeb
Fix #988 2022-03-30 01:06:51 -05:00
Andrew Senetar efd500ecc1
Update directory scanning to use os.scandir()
- Change to use os.scandir() instead of os.walk() to leverage DirEntry objects.
- Avoids extra calls to stat() on files during fs.can_handle()
- See 3x speed improvement on Windows in some cases
2022-03-29 23:37:56 -05:00
Andrew Senetar 43fcc52291
Replace pathlib.glob() with os.scandir() in fs.py 2022-03-29 22:35:38 -05:00
Andrew Senetar 50f5db1543
Update fs to support DirEntry on get_file() 2022-03-29 22:32:36 -05:00
Andrew Senetar a5b0ccdd02
Improve performance of Directories.get_state() 2022-03-29 21:48:14 -05:00
3 changed files with 55 additions and 56 deletions
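
The key idea behind commit efd500ecc1 is that os.scandir() yields os.DirEntry objects whose file-type information comes from the directory listing itself, so checks such as is_dir()/is_file() usually need no additional stat() call, whereas checking a plain pathlib.Path stats the file again. Below is a minimal standalone sketch of that difference; it is not dupeGuru code, and the count_files_* names are illustrative only.

    import os
    from pathlib import Path

    def count_files_pathlib(folder):
        # Each p.is_file() issues another stat() on top of the directory listing.
        return sum(1 for p in Path(folder).iterdir() if p.is_file())

    def count_files_scandir(folder):
        # DirEntry.is_file() reuses the type information gathered while listing
        # the directory, so in most cases no extra stat() call is made.
        with os.scandir(folder) as entries:
            return sum(1 for e in entries if e.is_file())

As the commit message notes, the extra stat() calls were happening in fs.can_handle(), which is presumably where the reported Windows speedup comes from once DirEntry objects are passed through to fs.get_file() (see the fs.py hunks below).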

View File

@@ -248,7 +248,7 @@ class DupeGuru(Broadcaster):
             ref = group.ref
             linkfunc = os.link if use_hardlinks else os.symlink
             linkfunc(str(ref.path), str_path)
-        self.clean_empty_dirs(dupe.path.parent())
+        self.clean_empty_dirs(dupe.path.parent)
 
     def _create_file(self, path):
         # We add fs.Folder to fileclasses in case the file we're loading contains folder paths.

View File

@@ -90,47 +90,45 @@ class Directories:
             return DirectoryState.EXCLUDED
 
     def _get_files(self, from_path, fileclasses, j):
-        for root, dirs, files in os.walk(str(from_path)):
-            j.check_if_cancelled()
-            root_path = Path(root)
-            state = self.get_state(root_path)
-            if state == DirectoryState.EXCLUDED and not any(
-                p.parts[: len(root_path.parts)] == root_path.parts for p in self.states
-            ):
-                # Recursively get files from folders with lots of subfolder is expensive. However, there
-                # might be a subfolder in this path that is not excluded. What we want to do is to skim
-                # through self.states and see if we must continue, or we can stop right here to save time
-                del dirs[:]
-            try:
-                if state != DirectoryState.EXCLUDED:
-                    # Old logic
-                    if self._exclude_list is None or not self._exclude_list.mark_count:
-                        found_files = [fs.get_file(root_path.joinpath(f), fileclasses=fileclasses) for f in files]
-                    else:
-                        found_files = []
-                        # print(f"len of files: {len(files)} {files}")
-                        for f in files:
-                            if not self._exclude_list.is_excluded(root, f):
-                                found_files.append(fs.get_file(root_path.joinpath(f), fileclasses=fileclasses))
-                    found_files = [f for f in found_files if f is not None]
-                    # In some cases, directories can be considered as files by dupeGuru, which is
-                    # why we have this line below. In fact, there only one case: Bundle files under
-                    # OS X... In other situations, this forloop will do nothing.
-                    for d in dirs[:]:
-                        f = fs.get_file(root_path.joinpath(d), fileclasses=fileclasses)
-                        if f is not None:
-                            found_files.append(f)
-                            dirs.remove(d)
-                    logging.debug(
-                        "Collected %d files in folder %s",
-                        len(found_files),
-                        str(root_path),
-                    )
-                    for file in found_files:
-                        file.is_ref = state == DirectoryState.REFERENCE
-                        yield file
-            except (EnvironmentError, fs.InvalidPath):
-                pass
+        try:
+            with os.scandir(from_path) as iter:
+                root_path = Path(from_path)
+                state = self.get_state(root_path)
+                # if we have no un-excluded dirs under this directory skip going deeper
+                skip_dirs = state == DirectoryState.EXCLUDED and not any(
+                    p.parts[: len(root_path.parts)] == root_path.parts for p in self.states
+                )
+                count = 0
+                for item in iter:
+                    j.check_if_cancelled()
+                    try:
+                        if item.is_dir():
+                            if skip_dirs:
+                                continue
+                            yield from self._get_files(item.path, fileclasses, j)
+                            continue
+                        elif state == DirectoryState.EXCLUDED:
+                            continue
+                        # File excluding or not
+                        if (
+                            self._exclude_list is None
+                            or not self._exclude_list.mark_count
+                            or not self._exclude_list.is_excluded(str(from_path), item.name)
+                        ):
+                            file = fs.get_file(item, fileclasses=fileclasses)
+                            if file:
+                                file.is_ref = state == DirectoryState.REFERENCE
+                                count += 1
+                                yield file
+                    except (EnvironmentError, OSError, fs.InvalidPath):
+                        pass
+                logging.debug(
+                    "Collected %d files in folder %s",
+                    count,
+                    str(root_path),
+                )
+        except OSError:
+            pass
 
     def _get_folders(self, from_folder, j):
         j.check_if_cancelled()
@@ -222,14 +220,11 @@ class Directories:
         if state != DirectoryState.NORMAL:
             self.states[path] = state
             return state
-        prevlen = 0
-        # we loop through the states to find the longest matching prefix
-        # if the parent has a state in cache, return that state
-        for p, s in self.states.items():
-            if p in path.parents and len(p.parts) > prevlen:
-                prevlen = len(p.parts)
-                state = s
+        # find the longest parent path that is in states and return that state if found
+        # NOTE: path.parents is ordered longest to shortest
+        for parent_path in path.parents:
+            if parent_path in self.states:
+                return self.states[parent_path]
         return state
 
     def has_any_file(self):
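
The get_state() rewrite above leans on pathlib's ordering of Path.parents, which runs from the immediate parent down to the filesystem root, so the first ancestor found in self.states is automatically the longest matching prefix and the prevlen bookkeeping becomes unnecessary. A quick standalone illustration (not dupeGuru code; the paths and states are made up):

    from pathlib import Path

    states = {Path("/photos"): "excluded", Path("/photos/keep"): "reference"}
    path = Path("/photos/keep/2021/img.jpg")

    print(list(path.parents)[:3])
    # [PosixPath('/photos/keep/2021'), PosixPath('/photos/keep'), PosixPath('/photos')]

    # Walking upward, the first cached ancestor is the nearest (longest) one.
    state = next((states[p] for p in path.parents if p in states), None)
    print(state)  # 'reference', not 'excluded'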

View File

@@ -377,8 +377,9 @@ class Folder(File):
     @property
     def subfolders(self):
         if self._subfolders is None:
-            subfolders = [p for p in self.path.glob("*") if not p.is_symlink() and p.is_dir()]
-            self._subfolders = [self.__class__(p) for p in subfolders]
+            with os.scandir(self.path) as iter:
+                subfolders = [p.path for p in iter if not p.is_symlink() and p.is_dir()]
+            self._subfolders = [self.__class__(Path(p)) for p in subfolders]
         return self._subfolders
 
     @classmethod
@@ -396,6 +397,8 @@ def get_file(path, fileclasses=[File]):
     """
     for fileclass in fileclasses:
         if fileclass.can_handle(path):
+            if type(path) is os.DirEntry:
+                return fileclass(Path(path.path))
             return fileclass(path)
@@ -408,10 +411,11 @@ def get_files(path, fileclasses=[File]):
     assert all(issubclass(fileclass, File) for fileclass in fileclasses)
     try:
         result = []
-        for path in path.glob("*"):
-            file = get_file(path, fileclasses=fileclasses)
-            if file is not None:
-                result.append(file)
+        with os.scandir(path) as iter:
+            for item in iter:
+                file = get_file(item, fileclasses=fileclasses)
+                if file is not None:
+                    result.append(file)
         return result
     except EnvironmentError:
         raise InvalidPath(path)
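
With the get_file() change above, fs.get_file() accepts either a pathlib.Path or an os.DirEntry from os.scandir(); a DirEntry is converted back to a Path via its .path attribute, presumably because the fileclass constructors expect Path objects. A standalone sketch of that dispatch pattern, with make_record as a hypothetical stand-in for a fileclass constructor:

    import os
    from pathlib import Path

    def make_record(path: Path) -> dict:
        # Hypothetical stand-in for a fileclass constructor that expects a Path.
        return {"name": path.name, "path": path}

    def to_record(path_or_entry):
        # Same dispatch as in the diff: an os.DirEntry from os.scandir() is
        # turned into a Path; a plain Path passes straight through.
        if type(path_or_entry) is os.DirEntry:
            return make_record(Path(path_or_entry.path))
        return make_record(path_or_entry)

    # Usage: works with both scandir entries and Path objects (paths illustrative).
    # with os.scandir(".") as entries:
    #     records = [to_record(e) for e in entries]
    # records.append(to_record(Path("some_file.txt")))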