2009-10-23 07:56:52 -05:00
|
|
|
# Created By: Virgil Dupras
|
|
|
|
# Created On: 2009-10-23
|
2015-01-03 15:30:57 -06:00
|
|
|
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
|
2019-12-31 20:16:27 -06:00
|
|
|
#
|
|
|
|
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
|
|
|
|
# which should be included with this package. The terms are also available at
|
2015-01-03 15:33:16 -06:00
|
|
|
# http://www.gnu.org/licenses/gpl-3.0.html
|
2009-10-23 07:56:52 -05:00
|
|
|
|
2022-03-27 22:27:13 -05:00
|
|
|
try:
|
|
|
|
import xxhash
|
|
|
|
|
|
|
|
hasher = xxhash.xxh128
|
|
|
|
except ImportError:
|
|
|
|
import hashlib
|
|
|
|
|
|
|
|
hasher = hashlib.md5
|
|
|
|
|
2021-06-21 15:44:05 -05:00
|
|
|
from os import urandom
|
2009-10-23 07:56:52 -05:00
|
|
|
|
2011-01-11 06:36:05 -06:00
|
|
|
from hscommon.path import Path
|
2011-01-05 04:11:21 -06:00
|
|
|
from hscommon.testutil import eq_
|
2009-12-30 04:37:57 -06:00
|
|
|
from core.tests.directories_test import create_fake_fs
|
2009-10-23 07:56:52 -05:00
|
|
|
|
|
|
|
from .. import fs
|
|
|
|
|
2019-12-31 20:16:27 -06:00
|
|
|
|
2021-06-21 15:44:05 -05:00
|
|
|
def create_fake_fs_with_random_data(rootpath):
|
|
|
|
rootpath = rootpath["fs"]
|
|
|
|
rootpath.mkdir()
|
|
|
|
rootpath["dir1"].mkdir()
|
|
|
|
rootpath["dir2"].mkdir()
|
|
|
|
rootpath["dir3"].mkdir()
|
|
|
|
fp = rootpath["file1.test"].open("wb")
|
|
|
|
data1 = urandom(200 * 1024) # 200KiB
|
|
|
|
data2 = urandom(1024 * 1024) # 1MiB
|
|
|
|
data3 = urandom(10 * 1024 * 1024) # 10MiB
|
|
|
|
fp.write(data1)
|
|
|
|
fp.close()
|
|
|
|
fp = rootpath["file2.test"].open("wb")
|
|
|
|
fp.write(data2)
|
|
|
|
fp.close()
|
|
|
|
fp = rootpath["file3.test"].open("wb")
|
|
|
|
fp.write(data3)
|
|
|
|
fp.close()
|
|
|
|
fp = rootpath["dir1"]["file1.test"].open("wb")
|
|
|
|
fp.write(data1)
|
|
|
|
fp.close()
|
|
|
|
fp = rootpath["dir2"]["file2.test"].open("wb")
|
|
|
|
fp.write(data2)
|
|
|
|
fp.close()
|
|
|
|
fp = rootpath["dir3"]["file3.test"].open("wb")
|
|
|
|
fp.write(data3)
|
|
|
|
fp.close()
|
|
|
|
return rootpath
|
|
|
|
|
|
|
|
|
2011-01-05 04:11:21 -06:00
|
|
|
def test_size_aggregates_subfiles(tmpdir):
|
|
|
|
p = create_fake_fs(Path(str(tmpdir)))
|
2011-04-12 06:22:29 -05:00
|
|
|
b = fs.Folder(p)
|
2011-01-05 04:11:21 -06:00
|
|
|
eq_(b.size, 12)
|
|
|
|
|
2019-12-31 20:16:27 -06:00
|
|
|
|
2022-03-27 22:27:13 -05:00
|
|
|
def test_digest_aggregate_subfiles_sorted(tmpdir):
|
|
|
|
# dir.allfiles can return child in any order. Thus, bundle.digest must aggregate
|
|
|
|
# all files' digests it contains, but it must make sure that it does so in the
|
2019-12-31 20:16:27 -06:00
|
|
|
# same order everytime.
|
2021-06-21 15:44:05 -05:00
|
|
|
p = create_fake_fs_with_random_data(Path(str(tmpdir)))
|
2011-04-12 06:22:29 -05:00
|
|
|
b = fs.Folder(p)
|
2022-03-27 22:27:13 -05:00
|
|
|
digest1 = fs.File(p["dir1"]["file1.test"]).digest
|
|
|
|
digest2 = fs.File(p["dir2"]["file2.test"]).digest
|
|
|
|
digest3 = fs.File(p["dir3"]["file3.test"]).digest
|
|
|
|
digest4 = fs.File(p["file1.test"]).digest
|
|
|
|
digest5 = fs.File(p["file2.test"]).digest
|
|
|
|
digest6 = fs.File(p["file3.test"]).digest
|
|
|
|
# The expected digest is the hash of digests for folders and the direct digest for files
|
|
|
|
folder_digest1 = hasher(digest1).digest()
|
|
|
|
folder_digest2 = hasher(digest2).digest()
|
|
|
|
folder_digest3 = hasher(digest3).digest()
|
|
|
|
digest = hasher(folder_digest1 + folder_digest2 + folder_digest3 + digest4 + digest5 + digest6).digest()
|
|
|
|
eq_(b.digest, digest)
|
|
|
|
|
|
|
|
|
|
|
|
def test_partial_digest_aggregate_subfile_sorted(tmpdir):
|
2021-06-21 15:44:05 -05:00
|
|
|
p = create_fake_fs_with_random_data(Path(str(tmpdir)))
|
|
|
|
b = fs.Folder(p)
|
2022-03-27 22:27:13 -05:00
|
|
|
digest1 = fs.File(p["dir1"]["file1.test"]).digest_partial
|
|
|
|
digest2 = fs.File(p["dir2"]["file2.test"]).digest_partial
|
|
|
|
digest3 = fs.File(p["dir3"]["file3.test"]).digest_partial
|
|
|
|
digest4 = fs.File(p["file1.test"]).digest_partial
|
|
|
|
digest5 = fs.File(p["file2.test"]).digest_partial
|
|
|
|
digest6 = fs.File(p["file3.test"]).digest_partial
|
|
|
|
# The expected digest is the hash of digests for folders and the direct digest for files
|
|
|
|
folder_digest1 = hasher(digest1).digest()
|
|
|
|
folder_digest2 = hasher(digest2).digest()
|
|
|
|
folder_digest3 = hasher(digest3).digest()
|
|
|
|
digest = hasher(folder_digest1 + folder_digest2 + folder_digest3 + digest4 + digest5 + digest6).digest()
|
|
|
|
eq_(b.digest_partial, digest)
|
|
|
|
|
|
|
|
digest1 = fs.File(p["dir1"]["file1.test"]).digest_samples
|
|
|
|
digest2 = fs.File(p["dir2"]["file2.test"]).digest_samples
|
|
|
|
digest3 = fs.File(p["dir3"]["file3.test"]).digest_samples
|
|
|
|
digest4 = fs.File(p["file1.test"]).digest_samples
|
|
|
|
digest5 = fs.File(p["file2.test"]).digest_samples
|
|
|
|
digest6 = fs.File(p["file3.test"]).digest_samples
|
|
|
|
# The expected digest is the digest of digests for folders and the direct digest for files
|
|
|
|
folder_digest1 = hasher(digest1).digest()
|
|
|
|
folder_digest2 = hasher(digest2).digest()
|
|
|
|
folder_digest3 = hasher(digest3).digest()
|
|
|
|
digest = hasher(folder_digest1 + folder_digest2 + folder_digest3 + digest4 + digest5 + digest6).digest()
|
|
|
|
eq_(b.digest_samples, digest)
|
2021-06-21 15:44:05 -05:00
|
|
|
|
|
|
|
|
2011-01-05 04:11:21 -06:00
|
|
|
def test_has_file_attrs(tmpdir):
|
2019-12-31 20:16:27 -06:00
|
|
|
# a Folder must behave like a file, so it must have mtime attributes
|
2011-04-12 06:22:29 -05:00
|
|
|
b = fs.Folder(Path(str(tmpdir)))
|
2011-01-05 04:11:21 -06:00
|
|
|
assert b.mtime > 0
|
2019-12-31 20:16:27 -06:00
|
|
|
eq_(b.extension, "")
|