Mirror of https://github.com/arsenetar/dupeguru.git
dgse qt: removed all hsfs usages.
--HG-- extra : convert_revision : svn%3Ac306627e-7827-47d3-bdf0-9a457c9553a1/trunk%40200
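The hunks below all make the same substitution: instead of wrapping a directory in an hsfs.phys.Directory tree and querying that object, the tests now work with plain paths, fetch file objects through the new fs.get_files(), and verify results directly on disk with io.listdir(). The following is a minimal standalone sketch of that verification style; it uses only the standard library (tempfile/os/shutil) rather than hsutil's Path and io wrappers, so the names differ from the test code.

    import os, shutil, tempfile

    # Build a throwaway directory with a few files, the way the updated
    # setUp() does with self.tmppath() and open(...).
    tmp = tempfile.mkdtemp()
    for name in ('foo bar 1', 'foo bar 2', 'foo bar 3'):
        open(os.path.join(tmp, name), 'w').close()

    # Old tests asserted membership against an hsfs directory object
    # ('renamed' in refdir); the new tests simply list the directory.
    os.rename(os.path.join(tmp, 'foo bar 2'), os.path.join(tmp, 'renamed'))
    names = os.listdir(tmp)
    assert 'renamed' in names
    assert 'foo bar 2' not in names

    shutil.rmtree(tmp)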
@@ -18,10 +18,10 @@ from hsutil.path import Path
 from hsutil.testcase import TestCase
 from hsutil.decorators import log_calls
 from hsutil import io
-import hsfs.phys

+from . import data
 from .results_test import GetTestGroups
-from .. import engine, data
+from .. import engine, fs
 try:
     from ..app_cocoa import DupeGuru as DupeGuruBase
 except ImportError:
@@ -35,7 +35,6 @@ class DupeGuru(DupeGuruBase):
     def _start_job(self, jobid, func):
         func(nulljob)
-

 def r2np(rows):
     #Transforms a list of rows [1,2,3] into a list of node paths [[1],[2],[3]]
     return [[i] for i in rows]
@@ -310,15 +309,15 @@ class TCDupeGuru(TestCase):

 class TCDupeGuru_renameSelected(TestCase):
     def setUp(self):
-        p = Path(tempfile.mkdtemp())
-        fp = open(str(p + 'foo bar 1'),mode='w')
+        p = self.tmppath()
+        fp = open(unicode(p + 'foo bar 1'),mode='w')
         fp.close()
-        fp = open(str(p + 'foo bar 2'),mode='w')
+        fp = open(unicode(p + 'foo bar 2'),mode='w')
         fp.close()
-        fp = open(str(p + 'foo bar 3'),mode='w')
+        fp = open(unicode(p + 'foo bar 3'),mode='w')
         fp.close()
-        refdir = hsfs.phys.Directory(None,str(p))
-        matches = engine.getmatches(refdir.files)
+        files = fs.get_files(p)
+        matches = engine.getmatches(files)
         groups = engine.get_groups(matches)
         g = groups[0]
         g.prioritize(lambda x:x.name)
@@ -327,45 +326,41 @@ class TCDupeGuru_renameSelected(TestCase):
         self.app = app
         self.groups = groups
         self.p = p
-        self.refdir = refdir
-
-    def tearDown(self):
-        shutil.rmtree(str(self.p))
+        self.files = files

     def test_simple(self):
         app = self.app
-        refdir = self.refdir
         g = self.groups[0]
         app.SelectPowerMarkerNodePaths(r2np([0]))
-        self.assert_(app.RenameSelected('renamed'))
-        self.assert_('renamed' in refdir)
-        self.assert_('foo bar 2' not in refdir)
-        self.assert_(g.dupes[0] is refdir['renamed'])
-        self.assert_(g.dupes[0] in refdir)
+        assert app.RenameSelected('renamed')
+        names = io.listdir(self.p)
+        assert 'renamed' in names
+        assert 'foo bar 2' not in names
+        eq_(g.dupes[0].name, 'renamed')

     def test_none_selected(self):
         app = self.app
-        refdir = self.refdir
         g = self.groups[0]
         app.SelectPowerMarkerNodePaths([])
         self.mock(logging, 'warning', log_calls(lambda msg: None))
-        self.assert_(not app.RenameSelected('renamed'))
+        assert not app.RenameSelected('renamed')
         msg = logging.warning.calls[0]['msg']
-        self.assertEqual('dupeGuru Warning: list index out of range', msg)
-        self.assert_('renamed' not in refdir)
-        self.assert_('foo bar 2' in refdir)
-        self.assert_(g.dupes[0] is refdir['foo bar 2'])
+        eq_('dupeGuru Warning: list index out of range', msg)
+        names = io.listdir(self.p)
+        assert 'renamed' not in names
+        assert 'foo bar 2' in names
+        eq_(g.dupes[0].name, 'foo bar 2')

     def test_name_already_exists(self):
         app = self.app
-        refdir = self.refdir
         g = self.groups[0]
         app.SelectPowerMarkerNodePaths(r2np([0]))
         self.mock(logging, 'warning', log_calls(lambda msg: None))
-        self.assert_(not app.RenameSelected('foo bar 1'))
+        assert not app.RenameSelected('foo bar 1')
         msg = logging.warning.calls[0]['msg']
-        self.assert_(msg.startswith('dupeGuru Warning: \'foo bar 2\' already exists in'))
-        self.assert_('foo bar 1' in refdir)
-        self.assert_('foo bar 2' in refdir)
-        self.assert_(g.dupes[0] is refdir['foo bar 2'])
+        assert msg.startswith('dupeGuru Warning: \'foo bar 1\' already exists in')
+        names = io.listdir(self.p)
+        assert 'foo bar 1' in names
+        assert 'foo bar 2' in names
+        eq_(g.dupes[0].name, 'foo bar 2')

@@ -13,12 +13,11 @@ from hsutil.testcase import TestCase
 from hsutil import io
 from hsutil.path import Path
 from hsutil.decorators import log_calls
-import hsfs as fs
-import hsfs.phys
 import hsutil.files
 from hsutil.job import nulljob

-from .. import data, app
+from . import data
+from .. import app, fs
 from ..app import DupeGuru as DupeGuruBase

 class DupeGuru(DupeGuruBase):
@@ -59,27 +58,27 @@ class TCDupeGuru(TestCase):
         # The goal here is just to have a test for a previous blowup I had. I know my test coverage
         # for this unit is pathetic. What's done is done. My approach now is to add tests for
         # every change I want to make. The blowup was caused by a missing import.
-        dupe_parent = fs.Directory(None, 'foo')
-        dupe = fs.File(dupe_parent, 'bar')
-        dupe.copy = log_calls(lambda dest, newname: None)
+        p = self.tmppath()
+        io.open(p + 'foo', 'w').close()
         self.mock(hsutil.files, 'copy', log_calls(lambda source_path, dest_path: None))
         self.mock(os, 'makedirs', lambda path: None) # We don't want the test to create that fake directory
-        self.mock(fs.phys, 'Directory', fs.Directory) # We don't want an error because makedirs didn't work
         app = DupeGuru()
-        app.copy_or_move(dupe, True, 'some_destination', 0)
+        app.directories.add_path(p)
+        [f] = app.directories.get_files()
+        app.copy_or_move(f, True, 'some_destination', 0)
         self.assertEqual(1, len(hsutil.files.copy.calls))
         call = hsutil.files.copy.calls[0]
         self.assertEqual('some_destination', call['dest_path'])
-        self.assertEqual(dupe.path, call['source_path'])
+        self.assertEqual(f.path, call['source_path'])

     def test_copy_or_move_clean_empty_dirs(self):
         tmppath = Path(self.tmpdir())
         sourcepath = tmppath + 'source'
         io.mkdir(sourcepath)
         io.open(sourcepath + 'myfile', 'w')
-        tmpdir = hsfs.phys.Directory(None, unicode(tmppath))
-        myfile = tmpdir['source']['myfile']
         app = DupeGuru()
+        app.directories.add_path(tmppath)
+        [myfile] = app.directories.get_files()
         self.mock(app, 'clean_empty_dirs', log_calls(lambda path: None))
         app.copy_or_move(myfile, False, tmppath + 'dest', 0)
         calls = app.clean_empty_dirs.calls
@@ -87,9 +86,14 @@ class TCDupeGuru(TestCase):
         self.assertEqual(sourcepath, calls[0]['path'])

     def test_Scan_with_objects_evaluating_to_false(self):
+        class FakeFile(fs.File):
+            def __nonzero__(self):
+                return False
+
+
         # At some point, any() was used in a wrong way that made Scan() wrongly return 1
         app = DupeGuru()
-        f1, f2 = [fs.File(None, 'foo') for i in range(2)]
+        f1, f2 = [FakeFile('foo') for i in range(2)]
         f1.is_ref, f2.is_ref = (False, False)
+        assert not (bool(f1) and bool(f2))
         app.directories.get_files = lambda: [f1, f2]
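The FakeFile class added above pins down the pitfall the comment refers to: any() and bare truth tests on file objects are not the same as checking that the objects exist, because a class can make its instances evaluate to False. A small self-contained illustration of that Python behaviour (not dupeGuru code; FakeFile here is a plain object, not fs.File):

    class FakeFile(object):
        def __nonzero__(self):    # Python 2 truth hook
            return False
        __bool__ = __nonzero__    # same hook under Python 3

    files = [FakeFile(), FakeFile()]

    # Truth-testing the objects treats them as "missing", so a scan gated on
    # any(files) would wrongly conclude there is nothing to work with.
    assert not any(files)

    # Checking for None or counting the list behaves as intended.
    assert all(f is not None for f in files)
    assert len(files) == 2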
base/py/tests/data.py (new file, 45 lines)
@@ -0,0 +1,45 @@
+# -*- coding: utf-8 -*-
+# Created By: Virgil Dupras
+# Created On: 2009-10-23
+# $Id$
+# Copyright 2009 Hardcoded Software (http://www.hardcoded.net)
+#
+# This software is licensed under the "HS" License as described in the "LICENSE" file,
+# which should be included with this package. The terms are also available at
+# http://www.hardcoded.net/licenses/hs_license
+
+# data module for tests
+
+from hsutil.str import format_size
+from dupeguru.data import format_path, cmp_value
+
+COLUMNS = [
+    {'attr':'name','display':'Filename'},
+    {'attr':'path','display':'Directory'},
+    {'attr':'size','display':'Size (KB)'},
+    {'attr':'extension','display':'Kind'},
+]
+
+METADATA_TO_READ = ['size']
+
+def GetDisplayInfo(dupe, group, delta):
+    size = dupe.size
+    m = group.get_match_of(dupe)
+    if m and delta:
+        r = group.ref
+        size -= r.size
+    return [
+        dupe.name,
+        format_path(dupe.path),
+        format_size(size, 0, 1, False),
+        dupe.extension,
+    ]
+
+def GetDupeSortKey(dupe, get_group, key, delta):
+    r = cmp_value(getattr(dupe, COLUMNS[key]['attr']))
+    if delta and (key == 2):
+        r -= cmp_value(getattr(get_group().ref, COLUMNS[key]['attr']))
+    return r
+
+def GetGroupSortKey(group, key):
+    return cmp_value(getattr(group.ref, COLUMNS[key]['attr']))
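The only non-obvious part of this new data module is the delta flag: when it is set and the dupe has a match in its group, GetDisplayInfo reports the size column relative to the group's reference file (size -= r.size), and GetDupeSortKey applies the same subtraction when sorting on column 2, the size column. A tiny worked example of that rule with made-up numbers (stand-in objects, not engine objects):

    class Obj(object):
        pass

    ref = Obj(); ref.size = 3000          # the group's reference file
    dupe = Obj(); dupe.size = 5000        # the duplicate being displayed/sorted

    def displayed_size(dupe, ref, delta):
        # Same arithmetic as GetDisplayInfo/GetDupeSortKey use in delta mode.
        size = dupe.size
        if delta:
            size -= ref.size
        return size

    assert displayed_size(dupe, ref, delta=False) == 5000   # absolute size
    assert displayed_size(dupe, ref, delta=True) == 2000    # relative to the reference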
@@ -10,20 +10,43 @@
 import os.path as op
 import os
 import time
 import shutil

 from nose.tools import eq_

-from hsutil import job, io
+from hsutil import io
 from hsutil.path import Path
 from hsutil.testcase import TestCase
-import hsfs.phys
-from hsfs.tests import phys_test

 from ..directories import *

 testpath = Path(TestCase.datadirpath())

+def create_fake_fs(rootpath):
+    rootpath = rootpath + 'fs'
+    io.mkdir(rootpath)
+    io.mkdir(rootpath + 'dir1')
+    io.mkdir(rootpath + 'dir2')
+    io.mkdir(rootpath + 'dir3')
+    fp = io.open(rootpath + 'file1.test', 'w')
+    fp.write('1')
+    fp.close()
+    fp = io.open(rootpath + 'file2.test', 'w')
+    fp.write('12')
+    fp.close()
+    fp = io.open(rootpath + 'file3.test', 'w')
+    fp.write('123')
+    fp.close()
+    fp = io.open(rootpath + ('dir1', 'file1.test'), 'w')
+    fp.write('1')
+    fp.close()
+    fp = io.open(rootpath + ('dir2', 'file2.test'), 'w')
+    fp.write('12')
+    fp.close()
+    fp = io.open(rootpath + ('dir3', 'file3.test'), 'w')
+    fp.write('123')
+    fp.close()
+    return rootpath

 class TCDirectories(TestCase):
     def test_empty(self):
         d = Directories()
@@ -33,13 +56,11 @@ class TCDirectories(TestCase):
     def test_add_path(self):
         d = Directories()
         p = testpath + 'utils'
-        added = d.add_path(p)
+        d.add_path(p)
         self.assertEqual(1,len(d))
         self.assert_(p in d)
         self.assert_((p + 'foobar') in d)
         self.assert_(p[:-1] not in d)
-        self.assertEqual(p,added.path)
-        self.assert_(d[0] is added)
         p = self.tmppath()
         d.add_path(p)
         self.assertEqual(2,len(d))
@@ -53,13 +74,13 @@ class TCDirectories(TestCase):
         self.assertRaises(AlreadyThereError, d.add_path, p + 'foobar')
         self.assertEqual(1, len(d))

-    def test_AddPath_containing_paths_already_there(self):
+    def test_add_path_containing_paths_already_there(self):
         d = Directories()
         d.add_path(testpath + 'utils')
         self.assertEqual(1, len(d))
-        added = d.add_path(testpath)
-        self.assertEqual(1, len(d))
-        self.assert_(added is d[0])
+        d.add_path(testpath)
+        eq_(len(d), 1)
+        eq_(d[0], testpath)

     def test_AddPath_non_latin(self):
         p = Path(self.tmpdir())
@@ -114,7 +135,7 @@ class TCDirectories(TestCase):

     def test_set_state_keep_state_dict_size_to_minimum(self):
         d = Directories()
-        p = Path(phys_test.create_fake_fs(self.tmpdir()))
+        p = create_fake_fs(self.tmppath())
         d.add_path(p)
         d.set_state(p,STATE_REFERENCE)
         d.set_state(p + 'dir1',STATE_REFERENCE)
@@ -129,7 +150,7 @@ class TCDirectories(TestCase):

     def test_get_files(self):
         d = Directories()
-        p = Path(phys_test.create_fake_fs(self.tmpdir()))
+        p = create_fake_fs(self.tmppath())
         d.add_path(p)
         d.set_state(p + 'dir1',STATE_REFERENCE)
         d.set_state(p + 'dir2',STATE_EXCLUDED)
@@ -177,52 +198,28 @@ class TCDirectories(TestCase):
         except LookupError:
             self.fail()

-    def test_default_dirclass(self):
-        self.assert_(Directories().dirclass is hsfs.phys.Directory)
-
-    def test_dirclass(self):
-        class MySpecialDirclass(hsfs.phys.Directory): pass
-        d = Directories()
-        d.dirclass = MySpecialDirclass
-        d.add_path(testpath)
-        self.assert_(isinstance(d[0], MySpecialDirclass))
-
     def test_load_from_file_with_invalid_path(self):
         #This test simulates a load from file resulting in a
         #InvalidPath raise. Other directories must be loaded.
         d1 = Directories()
         d1.add_path(testpath + 'utils')
         #Will raise InvalidPath upon loading
-        d1.add_path(self.tmppath()).name = 'does_not_exist'
+        p = self.tmppath()
+        d1.add_path(p)
+        io.rmdir(p)
         tmpxml = op.join(self.tmpdir(), 'directories_testunit.xml')
         d1.save_to_file(tmpxml)
         d2 = Directories()
         d2.load_from_file(tmpxml)
         self.assertEqual(1, len(d2))

-    def test_load_from_file_with_same_paths(self):
-        #This test simulates a load from file resulting in a
-        #AlreadyExists raise. Other directories must be loaded.
-        d1 = Directories()
-        p1 = self.tmppath()
-        p2 = self.tmppath()
-        d1.add_path(p1)
-        d1.add_path(p2)
-        #Will raise AlreadyExists upon loading
-        d1.add_path(self.tmppath()).name = unicode(p1)
-        tmpxml = op.join(self.tmpdir(), 'directories_testunit.xml')
-        d1.save_to_file(tmpxml)
-        d2 = Directories()
-        d2.load_from_file(tmpxml)
-        self.assertEqual(2, len(d2))
-
     def test_unicode_save(self):
         d = Directories()
         p1 = self.tmppath() + u'hello\xe9'
         io.mkdir(p1)
         io.mkdir(p1 + u'foo\xe9')
         d.add_path(p1)
-        d.set_state(d[0][0].path, STATE_EXCLUDED)
+        d.set_state(p1 + u'foo\xe9', STATE_EXCLUDED)
         tmpxml = op.join(self.tmpdir(), 'directories_testunit.xml')
         try:
             d.save_to_file(tmpxml)
@@ -231,7 +228,7 @@ class TCDirectories(TestCase):

     def test_get_files_refreshes_its_directories(self):
         d = Directories()
-        p = Path(phys_test.create_fake_fs(self.tmpdir()))
+        p = create_fake_fs(self.tmppath())
         d.add_path(p)
         files = d.get_files()
         self.assertEqual(6, len(list(files)))
@@ -258,16 +255,6 @@ class TCDirectories(TestCase):
         d.set_state(hidden_dir_path, STATE_NORMAL)
         self.assertEqual(d.get_state(hidden_dir_path), STATE_NORMAL)

-    def test_special_dirclasses(self):
-        # if a path is in special_dirclasses, use this class instead
-        class MySpecialDirclass(hsfs.phys.Directory): pass
-        d = Directories()
-        p1 = self.tmppath()
-        p2 = self.tmppath()
-        d.special_dirclasses[p1] = MySpecialDirclass
-        self.assert_(isinstance(d.add_path(p2), hsfs.phys.Directory))
-        self.assert_(isinstance(d.add_path(p1), MySpecialDirclass))
-
     def test_default_path_state_override(self):
         # It's possible for a subclass to override the default state of a path
         class MyDirectories(Directories):
@@ -16,8 +16,8 @@ from hsutil.path import Path
 from hsutil.testcase import TestCase
 from hsutil.misc import first

-from . import engine_test
-from .. import data, engine
+from . import engine_test, data
+from .. import engine
 from ..results import *

 class NamedObject(engine_test.NamedObject):
@@ -132,8 +132,6 @@ def test_content_scan_doesnt_put_md5_in_words_at_the_end():
     f[1].md5 = f[1].md5partial = '\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f'
     r = s.GetDupeGroups(f)
     g = r[0]
-    eq_(g.ref.words, ['--'])
-    eq_(g.dupes[0].words, ['--'])

 def test_extension_is_not_counted_in_filename_scan():
     s = Scanner()