# Created By: Virgil Dupras
# Created On: 2009-10-22
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html

# This is a fork from hsfs. The reason for this fork is that hsfs was designed for musicGuru
# and was re-used for dupeGuru. The problem is that hsfs is way over-engineered for dupeGuru,
# resulting in needless complexity and memory usage. It's been a while since I wanted to do
# that fork, and I'm doing it now.

import os

from math import floor
import logging
import sqlite3
from sys import platform
from threading import Lock
from typing import Any, AnyStr, Union, Callable

from pathlib import Path
from hscommon.util import nonone, get_file_ext

hasher: Callable
try:
    import xxhash

    hasher = xxhash.xxh128
except ImportError:
    import hashlib

    hasher = hashlib.md5

__all__ = [
    "File",
    "Folder",
    "get_file",
    "get_files",
    "FSError",
    "AlreadyExistsError",
    "InvalidPath",
    "InvalidDestinationError",
    "OperationError",
]

NOT_SET = object()

# The goal here is to not run out of memory on really big files. However, the chunk
# size has to be large enough so that the python loop isn't too costly in terms of
# CPU.
CHUNK_SIZE = 1024 * 1024  # 1 MiB

# Minimum size below which partial hashing is not used
MIN_FILE_SIZE = 3 * CHUNK_SIZE  # 3 MiB, because we take 3 samples

# Partial hashing offset and size
PARTIAL_OFFSET_SIZE = (0x4000, 0x4000)
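
# Taken together (see the sampling logic in File._read_info below): the full digest reads the
# whole file in 1 MiB chunks; the "partial" digest covers 16 KiB starting at offset 16 KiB
# (0x4000), with files smaller than 32 KiB falling back to the full digest; and the "samples"
# digest hashes three 1 MiB chunks (at 25%, at 60%, and at the end of the file), with files
# of 3 MiB or less falling back to the full digest.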


class FSError(Exception):
    cls_message = "An error has occurred on '{name}' in '{parent}'"

    def __init__(self, fsobject, parent=None):
        message = self.cls_message
        if isinstance(fsobject, str):
            name = fsobject
        elif isinstance(fsobject, File):
            name = fsobject.name
        else:
            name = ""
        parentname = str(parent) if parent is not None else ""
        Exception.__init__(self, message.format(name=name, parent=parentname))


class AlreadyExistsError(FSError):
    "The directory or file name we're trying to add already exists"
    cls_message = "'{name}' already exists in '{parent}'"


class InvalidPath(FSError):
    "The path of self is invalid, and cannot be worked with."
    cls_message = "'{name}' is invalid."


class InvalidDestinationError(FSError):
    """A copy/move operation has been called, but the destination is invalid."""

    cls_message = "'{name}' is an invalid destination for this operation."


class OperationError(FSError):
    """A copy/move/delete operation has been called, but the checkup after the
    operation shows that it didn't work."""

    cls_message = "Operation on '{name}' failed."


class FilesDB:
    schema_version = 1
    schema_version_description = "Changed from md5 to xxhash if available."

    create_table_query = """CREATE TABLE IF NOT EXISTS files (path TEXT PRIMARY KEY, size INTEGER, mtime_ns INTEGER,
    entry_dt DATETIME, digest BLOB, digest_partial BLOB, digest_samples BLOB)"""
    drop_table_query = "DROP TABLE IF EXISTS files;"
    select_query = "SELECT {key} FROM files WHERE path=:path AND size=:size and mtime_ns=:mtime_ns"
    select_query_ignore_mtime = "SELECT {key} FROM files WHERE path=:path AND size=:size"
    insert_query = """
    INSERT INTO files (path, size, mtime_ns, entry_dt, {key})
    VALUES (:path, :size, :mtime_ns, datetime('now'), :value)
    ON CONFLICT(path) DO UPDATE SET size=:size, mtime_ns=:mtime_ns, entry_dt=datetime('now'), {key}=:value;
    """

    ignore_mtime = False
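
    # Cached digests are keyed on (path, size, mtime_ns), so a changed size or mtime
    # invalidates the cached value. When ignore_mtime is true (presumably toggled by the
    # application), lookups match on path and size only.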

    def __init__(self):
        self.conn = None
        self.lock = None

    def connect(self, path: Union[AnyStr, os.PathLike]) -> None:
        if platform.startswith("gnu0"):
            self.conn = sqlite3.connect(path, check_same_thread=False, isolation_level=None)
        else:
            self.conn = sqlite3.connect(path, check_same_thread=False)
        self.lock = Lock()
        self._check_upgrade()

    def _check_upgrade(self) -> None:
        with self.lock, self.conn as conn:
            has_schema = conn.execute(
                "SELECT NAME FROM sqlite_master WHERE type='table' AND name='schema_version'"
            ).fetchall()
            version = None
            if has_schema:
                version = conn.execute("SELECT version FROM schema_version ORDER BY version DESC").fetchone()[0]
            else:
                conn.execute("CREATE TABLE schema_version (version int PRIMARY KEY, description TEXT)")
            if version != self.schema_version:
                conn.execute(self.drop_table_query)
                conn.execute(
                    "INSERT OR REPLACE INTO schema_version VALUES (:version, :description)",
                    {"version": self.schema_version, "description": self.schema_version_description},
                )
            conn.execute(self.create_table_query)

    def clear(self) -> None:
        with self.lock, self.conn as conn:
            conn.execute(self.drop_table_query)
            conn.execute(self.create_table_query)

    def get(self, path: Path, key: str) -> Union[bytes, None]:
        stat = path.stat()
        size = stat.st_size
        mtime_ns = stat.st_mtime_ns
        try:
            with self.conn as conn:
                if self.ignore_mtime:
                    cursor = conn.execute(
                        self.select_query_ignore_mtime.format(key=key), {"path": str(path), "size": size}
                    )
                else:
                    cursor = conn.execute(
                        self.select_query.format(key=key),
                        {"path": str(path), "size": size, "mtime_ns": mtime_ns},
                    )
                result = cursor.fetchone()
                cursor.close()

            if result:
                return result[0]
        except Exception as ex:
            logging.warning(f"Couldn't get {key} for {path} w/{size}, {mtime_ns}: {ex}")

        return None

    def put(self, path: Path, key: str, value: Any) -> None:
        stat = path.stat()
        size = stat.st_size
        mtime_ns = stat.st_mtime_ns
        try:
            with self.lock, self.conn as conn:
                conn.execute(
                    self.insert_query.format(key=key),
                    {"path": str(path), "size": size, "mtime_ns": mtime_ns, "value": value},
                )
        except Exception as ex:
            logging.warning(f"Couldn't put {key} for {path} w/{size}, {mtime_ns}: {ex}")

    def commit(self) -> None:
        with self.lock:
            self.conn.commit()

    def close(self) -> None:
        with self.lock:
            self.conn.close()


filesdb = FilesDB()  # Singleton
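
# A minimal usage sketch of the digest cache, assuming some_path points at an existing file
# (the names and values here are illustrative, not dupeGuru's actual startup sequence):
#
#     filesdb.connect(":memory:")
#     digest = filesdb.get(some_path, "digest")  # None on a cache miss
#     if digest is None:
#         filesdb.put(some_path, "digest", hasher(some_path.read_bytes()).digest())
#     filesdb.commit()
#     filesdb.close()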


class File:
    """Represents a file and holds metadata to be used for scanning."""

    INITIAL_INFO = {"size": 0, "mtime": 0, "digest": b"", "digest_partial": b"", "digest_samples": b""}
    # Slots for File make us save quite a bit of memory. In a memory test I've made with a lot of
    # files, I saved 35% memory usage with "unread" files (no _read_info() call) and gains become
    # even greater when we take into account read attributes (70%!). Yeah, it's worth it.
    __slots__ = ("path", "unicode_path", "is_ref", "words") + tuple(INITIAL_INFO.keys())

    def __init__(self, path):
        for attrname in self.INITIAL_INFO:
            setattr(self, attrname, NOT_SET)
        if type(path) is os.DirEntry:
            self.path = Path(path.path)
            self.size = nonone(path.stat().st_size, 0)
            self.mtime = nonone(path.stat().st_mtime, 0)
        else:
            self.path = path
        if self.path:
            self.unicode_path = str(self.path)

    def __repr__(self):
        return f"<{self.__class__.__name__} {str(self.path)}>"
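
    # The metadata attributes listed in INITIAL_INFO start out as the NOT_SET sentinel and are
    # filled in lazily by __getattribute__ below: the first access to e.g. File(p).size or
    # File(p).digest triggers _read_info(), which stats or hashes the file (consulting the
    # filesdb cache for digests) and falls back to the INITIAL_INFO default if that fails.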
    def __getattribute__(self, attrname):
        result = object.__getattribute__(self, attrname)
        if result is NOT_SET:
            try:
                self._read_info(attrname)
            except Exception as e:
                logging.warning("An error '%s' was raised while decoding '%s'", e, repr(self.path))
            result = object.__getattribute__(self, attrname)
            if result is NOT_SET:
                result = self.INITIAL_INFO[attrname]
        return result

    def _calc_digest(self):
        # type: () -> bytes

        with self.path.open("rb") as fp:
            file_hash = hasher()
            # Read in chunks (see the module-level CHUNK_SIZE) so we don't run out of memory
            # on really big files.
            filedata = fp.read(CHUNK_SIZE)
            while filedata:
                file_hash.update(filedata)
                filedata = fp.read(CHUNK_SIZE)
            return file_hash.digest()

    def _calc_digest_partial(self):
        # type: () -> bytes
        with self.path.open("rb") as fp:
            fp.seek(PARTIAL_OFFSET_SIZE[0])
            partial_data = fp.read(PARTIAL_OFFSET_SIZE[1])
            return hasher(partial_data).digest()

    def _calc_digest_samples(self) -> bytes:
        size = self.size
        with self.path.open("rb") as fp:
            # Chunk at 25% of the file
            fp.seek(floor(size * 25 / 100), 0)
            file_data = fp.read(CHUNK_SIZE)
            file_hash = hasher(file_data)

            # Chunk at 60% of the file
            fp.seek(floor(size * 60 / 100), 0)
            file_data = fp.read(CHUNK_SIZE)
            file_hash.update(file_data)

            # Last chunk of the file
            fp.seek(-CHUNK_SIZE, 2)
            file_data = fp.read(CHUNK_SIZE)
            file_hash.update(file_data)
            return file_hash.digest()

    def _read_info(self, field):
        # print(f"_read_info({field}) for {self}")
        if field in ("size", "mtime"):
            stats = self.path.stat()
            self.size = nonone(stats.st_size, 0)
            self.mtime = nonone(stats.st_mtime, 0)
        elif field == "digest_partial":
            self.digest_partial = filesdb.get(self.path, "digest_partial")
            if self.digest_partial is None:
                # If the file is smaller than the partial requirements, just use the full digest
                if self.size < PARTIAL_OFFSET_SIZE[0] + PARTIAL_OFFSET_SIZE[1]:
                    self.digest_partial = self.digest
                else:
                    self.digest_partial = self._calc_digest_partial()
                filesdb.put(self.path, "digest_partial", self.digest_partial)
        elif field == "digest":
            self.digest = filesdb.get(self.path, "digest")
            if self.digest is None:
                self.digest = self._calc_digest()
                filesdb.put(self.path, "digest", self.digest)
        elif field == "digest_samples":
            size = self.size
            # Might as well hash such small files entirely.
            if size <= MIN_FILE_SIZE:
                self.digest_samples = self.digest
                return
            self.digest_samples = filesdb.get(self.path, "digest_samples")
            if self.digest_samples is None:
                self.digest_samples = self._calc_digest_samples()
                filesdb.put(self.path, "digest_samples", self.digest_samples)

    def _read_all_info(self, attrnames=None):
        """Cache all possible info.

        If `attrnames` is not None, caches only attrnames.
        """
        if attrnames is None:
            attrnames = self.INITIAL_INFO.keys()
        for attrname in attrnames:
            getattr(self, attrname)

    # --- Public
    @classmethod
    def can_handle(cls, path):
        """Returns whether this file wrapper class can handle ``path``."""
        return not path.is_symlink() and path.is_file()

    def exists(self) -> bool:
        """Safely check whether the underlying file exists; treat errors as non-existent."""
        try:
            return self.path.exists()
        except OSError as ex:
            logging.warning(f"Checking {self.path} raised: {ex}")
            return False

    def rename(self, newname):
        if newname == self.name:
            return
        destpath = self.path.parent.joinpath(newname)
        if destpath.exists():
            raise AlreadyExistsError(newname, self.path.parent)
        try:
            self.path.rename(destpath)
        except OSError:
            raise OperationError(self)
        if not destpath.exists():
            raise OperationError(self)
        self.path = destpath

    def get_display_info(self, group, delta):
        """Returns a display-ready dict of dupe's data."""
        raise NotImplementedError()

    # --- Properties
    @property
    def extension(self):
        return get_file_ext(self.name)

    @property
    def name(self):
        return self.path.name

    @property
    def folder_path(self):
        return self.path.parent


class Folder(File):
    """A wrapper around a folder path.

    It has the size/digest info of a File, but its value is the sum of its subitems.
    """

    __slots__ = File.__slots__ + ("_subfolders",)

    def __init__(self, path):
        File.__init__(self, path)
        self.size = NOT_SET
        self._subfolders = None

    def _all_items(self):
        folders = self.subfolders
        files = get_files(self.path)
        return folders + files

    def _read_info(self, field):
        # print(f"_read_info({field}) for Folder {self}")
        if field in {"size", "mtime"}:
            size = sum((f.size for f in self._all_items()), 0)
            self.size = size
            stats = self.path.stat()
            self.mtime = nonone(stats.st_mtime, 0)
        elif field in {"digest", "digest_partial", "digest_samples"}:
            # What's sensitive here is that we must make sure that subfiles'
            # digests are always added up in the same order, but we also want a
            # different digest if a file gets moved to a different subdirectory.

            def get_dir_digest_concat():
                items = self._all_items()
                items.sort(key=lambda f: f.path)
                digests = [getattr(f, field) for f in items]
                return b"".join(digests)

            digest = hasher(get_dir_digest_concat()).digest()
            setattr(self, field, digest)

    @property
    def subfolders(self):
        if self._subfolders is None:
            with os.scandir(self.path) as it:
                subfolders = [p for p in it if not p.is_symlink() and p.is_dir()]
            self._subfolders = [self.__class__(p) for p in subfolders]
        return self._subfolders

    @classmethod
    def can_handle(cls, path):
        return not path.is_symlink() and path.is_dir()


def get_file(path, fileclasses=[File]):
    """Wraps ``path`` around its appropriate :class:`File` class.

    Whether a class is "appropriate" is decided by :meth:`File.can_handle`

    :param Path path: path to wrap
    :param fileclasses: List of candidate :class:`File` classes
    """
    for fileclass in fileclasses:
        if fileclass.can_handle(path):
            return fileclass(path)


def get_files(path, fileclasses=[File]):
    """Returns a list of :class:`File` for each file contained in ``path``.

    :param Path path: path to scan
    :param fileclasses: List of candidate :class:`File` classes
    """
    assert all(issubclass(fileclass, File) for fileclass in fileclasses)
    try:
        result = []
        with os.scandir(path) as it:
            for item in it:
                file = get_file(item, fileclasses=fileclasses)
                if file is not None:
                    result.append(file)
        return result
    except OSError:
        raise InvalidPath(path)
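

# A rough usage sketch (the path is illustrative; in dupeGuru this module is normally driven
# by the directories/scanner layers rather than called directly):
#
#     filesdb.connect(":memory:")
#     for f in get_files(Path("/some/folder")):
#         print(f.name, f.size, f.digest_partial.hex())
#     filesdb.commit()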