# Created By: Virgil Dupras
# Created On: 2009-10-23
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html

import typing
from os import urandom
from pathlib import Path

from hscommon.testutil import eq_
from core.tests.directories_test import create_fake_fs
from core import fs

hasher: typing.Callable
# Prefer xxhash when it is installed (fast, non-cryptographic); otherwise fall
# back to hashlib's MD5, which exposes the same update()/digest() interface.
try:
    import xxhash

    hasher = xxhash.xxh128
except ImportError:
    import hashlib

    hasher = hashlib.md5
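
# A minimal sanity check of the fallback above, assuming only the hashlib-style
# interface (bytes in the constructor, update(), digest()) that the
# folder-digest arithmetic in the tests below relies on.
def test_hasher_backend_interface():
    h1 = hasher(b"abc")
    h2 = hasher()
    h2.update(b"abc")
    eq_(h1.digest(), h2.digest())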


def create_fake_fs_with_random_data(rootpath):
    # Build a small tree where the same random payloads appear both at the root
    # and inside dir1..dir3, giving the digest tests known duplicate content.
    rootpath = rootpath.joinpath("fs")
    rootpath.mkdir()
    rootpath.joinpath("dir1").mkdir()
    rootpath.joinpath("dir2").mkdir()
    rootpath.joinpath("dir3").mkdir()
    data1 = urandom(200 * 1024)  # 200KiB
    data2 = urandom(1024 * 1024)  # 1MiB
    data3 = urandom(10 * 1024 * 1024)  # 10MiB
    with rootpath.joinpath("file1.test").open("wb") as fp:
        fp.write(data1)
    with rootpath.joinpath("file2.test").open("wb") as fp:
        fp.write(data2)
    with rootpath.joinpath("file3.test").open("wb") as fp:
        fp.write(data3)
    with rootpath.joinpath("dir1", "file1.test").open("wb") as fp:
        fp.write(data1)
    with rootpath.joinpath("dir2", "file2.test").open("wb") as fp:
        fp.write(data2)
    with rootpath.joinpath("dir3", "file3.test").open("wb") as fp:
        fp.write(data3)
    return rootpath


def test_size_aggregates_subfiles(tmpdir):
    # The files created by create_fake_fs() total 12 bytes, so a Folder's size
    # must aggregate the sizes of all files under it, recursively.
    p = create_fake_fs(Path(str(tmpdir)))
    b = fs.Folder(p)
    eq_(b.size, 12)


def test_digest_aggregate_subfiles_sorted(tmpdir):
    # dir.allfiles can return children in any order. Thus, bundle.digest must
    # aggregate the digests of all the files it contains, but it must make sure
    # that it does so in the same order every time.
    p = create_fake_fs_with_random_data(Path(str(tmpdir)))
    b = fs.Folder(p)
    digest1 = fs.File(p.joinpath("dir1", "file1.test")).digest
    digest2 = fs.File(p.joinpath("dir2", "file2.test")).digest
    digest3 = fs.File(p.joinpath("dir3", "file3.test")).digest
    digest4 = fs.File(p.joinpath("file1.test")).digest
    digest5 = fs.File(p.joinpath("file2.test")).digest
    digest6 = fs.File(p.joinpath("file3.test")).digest
    # The expected digest is the hash of the folders' digests combined with the
    # files' direct digests, in name-sorted order.
    folder_digest1 = hasher(digest1).digest()
    folder_digest2 = hasher(digest2).digest()
    folder_digest3 = hasher(digest3).digest()
    digest = hasher(folder_digest1 + folder_digest2 + folder_digest3 + digest4 + digest5 + digest6).digest()
    eq_(b.digest, digest)
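
# The expected-value arithmetic above recurs in the partial and samples tests
# below. As a sketch (assuming the fixture's layout: one-file subfolders
# dir1..dir3 first, then the root files, in name-sorted order), it could be
# factored into a hypothetical helper:
def expected_folder_digest(folder_digests, file_digests):
    # Folders contribute the hash of their own digest; files contribute their
    # digest directly, e.g.
    # expected_folder_digest([digest1, digest2, digest3], [digest4, digest5, digest6]).
    parts = [hasher(d).digest() for d in folder_digests] + list(file_digests)
    return hasher(b"".join(parts)).digest()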


def test_partial_digest_aggregate_subfile_sorted(tmpdir):
    p = create_fake_fs_with_random_data(Path(str(tmpdir)))
    b = fs.Folder(p)
    digest1 = fs.File(p.joinpath("dir1", "file1.test")).digest_partial
    digest2 = fs.File(p.joinpath("dir2", "file2.test")).digest_partial
    digest3 = fs.File(p.joinpath("dir3", "file3.test")).digest_partial
    digest4 = fs.File(p.joinpath("file1.test")).digest_partial
    digest5 = fs.File(p.joinpath("file2.test")).digest_partial
    digest6 = fs.File(p.joinpath("file3.test")).digest_partial
    # The expected digest is the hash of the folders' digests combined with the
    # files' direct digests, in name-sorted order.
    folder_digest1 = hasher(digest1).digest()
    folder_digest2 = hasher(digest2).digest()
    folder_digest3 = hasher(digest3).digest()
    digest = hasher(folder_digest1 + folder_digest2 + folder_digest3 + digest4 + digest5 + digest6).digest()
    eq_(b.digest_partial, digest)


def test_digest_samples_aggregate_subfiles_sorted(tmpdir):
    p = create_fake_fs_with_random_data(Path(str(tmpdir)))
    b = fs.Folder(p)
    digest1 = fs.File(p.joinpath("dir1", "file1.test")).digest_samples
    digest2 = fs.File(p.joinpath("dir2", "file2.test")).digest_samples
    digest3 = fs.File(p.joinpath("dir3", "file3.test")).digest_samples
    digest4 = fs.File(p.joinpath("file1.test")).digest_samples
    digest5 = fs.File(p.joinpath("file2.test")).digest_samples
    digest6 = fs.File(p.joinpath("file3.test")).digest_samples
    # The expected digest is the hash of the folders' digests combined with the
    # files' direct digests, in name-sorted order.
    folder_digest1 = hasher(digest1).digest()
    folder_digest2 = hasher(digest2).digest()
    folder_digest3 = hasher(digest3).digest()
    digest = hasher(folder_digest1 + folder_digest2 + folder_digest3 + digest4 + digest5 + digest6).digest()
    eq_(b.digest_samples, digest)


def test_has_file_attrs(tmpdir):
    # A Folder must behave like a file, so it must have file attributes such as
    # mtime and extension.
    b = fs.Folder(Path(str(tmpdir)))
    assert b.mtime > 0
    eq_(b.extension, "")
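

# A hedged companion check: assuming Folder also mirrors File's name attribute
# (not verified here against the fs module), its name should match the
# underlying directory's basename.
def test_folder_name_matches_path(tmpdir):
    p = Path(str(tmpdir))
    b = fs.Folder(p)
    eq_(b.name, p.name)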