mirror of https://github.com/borgbackup/borg.git
synced 2025-03-15 00:21:56 +00:00

Merge pull request #616 from ThomasWaldmann/cleanup

cleanup, flake8, fixes #4

commit ae63f3c686
21 changed files with 62 additions and 48 deletions

@@ -14,6 +14,9 @@ matrix:
     - python: 3.5
       os: linux
       env: TOXENV=py35
+    - python: 3.5
+      os: linux
+      env: TOXENV=flake8
     - language: generic
       os: osx
       osx_image: xcode6.4

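The new matrix entry wires the flake8 tox environment (added in the tox.ini change at the bottom of this diff) into CI, so style violations now fail the build alongside the regular test jobs.
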
@@ -1,3 +1,2 @@
 from borg.archiver import main
 main()
-

@@ -187,7 +187,7 @@ class Archive:

     @property
     def duration(self):
-        return format_timedelta(self.end-self.start)
+        return format_timedelta(self.end - self.start)

     def __str__(self):
         return '''Archive name: {0.name}

@@ -591,8 +591,7 @@ Number of files: {0.stats.nfiles}'''.format(self)
 # this set must be kept complete, otherwise the RobustUnpacker might malfunction:
 ITEM_KEYS = set([b'path', b'source', b'rdev', b'chunks',
                  b'mode', b'user', b'group', b'uid', b'gid', b'mtime', b'atime', b'ctime',
-                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended',
-                 ])
+                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended', ])


 class RobustUnpacker:

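The only change above is folding the closing "])" onto the previous line. Since the comment insists the set stay complete, here is a quick standalone sketch of the invariant it protects (the valid_item helper is hypothetical, not borg's API):

    ITEM_KEYS = set([b'path', b'source', b'rdev', b'chunks',
                     b'mode', b'user', b'group', b'uid', b'gid', b'mtime', b'atime', b'ctime',
                     b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended', ])

    def valid_item(item):
        # an unpacked item must be a dict whose keys are all known item keys
        return isinstance(item, dict) and set(item) <= ITEM_KEYS

    assert valid_item({b'path': b'/etc/hosts', b'mode': 0o644})
    assert not valid_item({b'path': b'/etc/hosts', b'bogus': 1})
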
@@ -239,8 +239,7 @@ class Archiver:
         # Ignore if nodump flag is set
         if has_lchflags and (st.st_flags & stat.UF_NODUMP):
             return
-        if (stat.S_ISREG(st.st_mode) or
-                read_special and not stat.S_ISDIR(st.st_mode)):
+        if stat.S_ISREG(st.st_mode) or read_special and not stat.S_ISDIR(st.st_mode):
             if not dry_run:
                 try:
                     status = archive.process_file(path, st, cache)

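Dropping the parentheses is safe because "and" binds tighter than "or", so both spellings parse as: regular file, or (read_special and not a directory). A standalone check over all eight truth combinations:

    from itertools import product

    for a, b, c in product((False, True), repeat=3):
        # a or b and not c  ==  a or (b and (not c))
        assert (a or b and not c) == (a or (b and (not c)))
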
@@ -576,7 +575,7 @@ class Archiver:
         archive = Archive(repository, key, manifest, args.location.archive)
         for i, item_id in enumerate(archive.metadata[b'items']):
             data = key.decrypt(item_id, repository.get(item_id))
-            filename = '%06d_%s.items' %(i, hexlify(item_id).decode('ascii'))
+            filename = '%06d_%s.items' % (i, hexlify(item_id).decode('ascii'))
             print('Dumping', filename)
             with open(filename, 'wb') as fd:
                 fd.write(data)

@@ -594,7 +593,7 @@ class Archiver:
                 print("object id %s is invalid." % hex_id)
             else:
                 try:
-                    data =repository.get(id)
+                    data = repository.get(id)
                 except repository.ObjectNotFound:
                     print("object %s not found." % hex_id)
                 else:

@@ -756,7 +755,7 @@ class Archiver:

     def preprocess_args(self, args):
         deprecations = [
-            #('--old', '--new', 'Warning: "--old" has been deprecated. Use "--new" instead.'),
+            # ('--old', '--new', 'Warning: "--old" has been deprecated. Use "--new" instead.'),
         ]
         for i, arg in enumerate(args[:]):
             for old_name, new_name, warning in deprecations:

@@ -787,8 +786,7 @@ class Archiver:
         parser = argparse.ArgumentParser(prog=prog, description='Borg - Deduplicated Backups')
         parser.add_argument('-V', '--version', action='version', version='%(prog)s ' + __version__,
                             help='show version number and exit')
-        subparsers = parser.add_subparsers(title='required arguments',
-                                           metavar='<command>')
+        subparsers = parser.add_subparsers(title='required arguments', metavar='<command>')

         serve_epilog = textwrap.dedent("""
         This command starts a repository server process. This command is usually not used manually.

@@ -130,10 +130,10 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
         try:
             cache_version = self.config.getint('cache', 'version')
             wanted_version = 1
             if cache_version != wanted_version:
                 raise Exception('%s has unexpected cache version %d (wanted: %d).' % (
                     config_path, cache_version, wanted_version))
-        except configparser.NoSectionError as e:
+        except configparser.NoSectionError:
             raise Exception('%s does not look like a Borg cache.' % config_path) from None
         self.id = self.config.get('cache', 'repository')
         self.manifest_id = unhexlify(self.config.get('cache', 'manifest'))

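The "as e" binding was unused (pyflakes F841); removing it is pure cleanup. The interesting part kept intact is "raise ... from None", which hides the configparser traceback from the user. A minimal standalone sketch of that effect (hypothetical message text):

    import configparser

    config = configparser.ConfigParser()  # empty: no [cache] section
    try:
        try:
            config.getint('cache', 'version')
        except configparser.NoSectionError:
            raise Exception('does not look like a cache') from None
    except Exception as exc:
        # `from None` suppresses implicit chaining, so no
        # "During handling of the above exception ..." block is shown.
        assert exc.__suppress_context__ and exc.__cause__ is None
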
@@ -209,7 +209,7 @@ class FuseOperations(llfuse.Operations):
                 continue
             n = min(size, s - offset)
             chunk = self.key.decrypt(id, self.repository.get(id))
-            parts.append(chunk[offset:offset+n])
+            parts.append(chunk[offset:offset + n])
             offset = 0
             size -= n
             if not size:

@@ -128,7 +128,7 @@ class Manifest:


 def prune_within(archives, within):
-    multiplier = {'H': 1, 'd': 24, 'w': 24*7, 'm': 24*31, 'y': 24*365}
+    multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
     try:
         hours = int(within[:-1]) * multiplier[within[-1]]
     except (KeyError, ValueError):

@@ -136,7 +136,7 @@ def prune_within(archives, within):
         raise argparse.ArgumentTypeError('Unable to parse --within option: "%s"' % within)
     if hours <= 0:
         raise argparse.ArgumentTypeError('Number specified using --within option must be positive')
-    target = datetime.now(timezone.utc) - timedelta(seconds=hours*60*60)
+    target = datetime.now(timezone.utc) - timedelta(seconds=hours * 3600)
     return [a for a in archives if a.ts > target]


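For reference, the unit handling of --within distilled into a standalone sketch (the helper name is made up; the arithmetic matches the code above):

    def within_to_hours(within):
        # trailing character selects the unit, leading digits the amount
        multiplier = {'H': 1, 'd': 24, 'w': 24 * 7, 'm': 24 * 31, 'y': 24 * 365}
        return int(within[:-1]) * multiplier[within[-1]]

    assert within_to_hours('12H') == 12
    assert within_to_hours('2d') == 48
    assert within_to_hours('1w') == 168
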
@@ -200,7 +200,7 @@ class Statistics:
             path = remove_surrogates(item[b'path']) if item else ''
             space = columns - len(msg)
             if space < len('...') + len(path):
-                path = '%s...%s' % (path[:(space//2)-len('...')], path[-space//2:])
+                path = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
             msg += "{0:<{space}}".format(path, space=space)
         else:
             msg = ' ' * columns

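The reformatted line middle-truncates a long path so the progress message fits the terminal width. A standalone illustration with assumed inputs:

    path, space = 'very/long/path/to/some/file.txt', 16
    short = '%s...%s' % (path[:(space // 2) - len('...')], path[-space // 2:])
    assert short == 'very/...file.txt' and len(short) == space
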
@@ -355,7 +355,7 @@ class FnmatchPattern(PatternBase):
         if pattern.endswith(os.path.sep):
             pattern = os.path.normpath(pattern).rstrip(os.path.sep) + os.path.sep + '*' + os.path.sep
         else:
-            pattern = os.path.normpath(pattern) + os.path.sep+'*'
+            pattern = os.path.normpath(pattern) + os.path.sep + '*'

         self.pattern = pattern

@@ -831,6 +831,7 @@ FALSISH = ('No', 'NO', 'no', 'N', 'n', '0', )
 TRUISH = ('Yes', 'YES', 'yes', 'Y', 'y', '1', )
 DEFAULTISH = ('Default', 'DEFAULT', 'default', 'D', 'd', '', )

+
 def yes(msg=None, false_msg=None, true_msg=None, default_msg=None,
         retry_msg=None, invalid_msg=None, env_msg=None,
         falsish=FALSISH, truish=TRUISH, defaultish=DEFAULTISH,

@@ -951,7 +952,6 @@ class ProgressIndicatorPercent:
         print(" " * len(self.msg % 100.0), file=self.file, end='\r')


-
 class ProgressIndicatorEndless:
     def __init__(self, step=10, file=sys.stderr):
         """

@@ -360,7 +360,7 @@ class KeyfileKey(KeyfileKeyBase):
             filename = os.path.join(keys_dir, name)
             with open(filename, 'r') as fd:
                 line = fd.readline().strip()
-                if line.startswith(self.FILE_ID) and line[len(self.FILE_ID)+1:] == id:
+                if line.startswith(self.FILE_ID) and line[len(self.FILE_ID) + 1:] == id:
                     return filename
         raise KeyfileNotFoundError(self.repository._location.canonical_path(), get_keys_dir())

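The slice skips the file ID plus the single space that separates it from the repository ID on the key file's first line. A standalone sketch, assuming the "BORG_KEY <repo_id>" first-line format:

    FILE_ID = 'BORG_KEY'
    line = 'BORG_KEY 0123456789abcdef'
    assert line.startswith(FILE_ID)
    assert line[len(FILE_ID) + 1:] == '0123456789abcdef'
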
@@ -111,7 +111,7 @@ class ExclusiveLock:
         self.sleep = sleep
         self.path = os.path.abspath(path)
         self.id = id or get_id()
         self.unique_name = os.path.join(self.path, "%s.%d-%x" % self.id)

     def __enter__(self):
         return self.acquire()

@@ -185,7 +185,7 @@ class RemoteRepository:
         else:
             raise ValueError('log level missing, fix this code')
         if testing:
-            return [sys.executable, '-m', 'borg.archiver', 'serve' ] + opts + self.extra_test_args
+            return [sys.executable, '-m', 'borg.archiver', 'serve'] + opts + self.extra_test_args
         else:  # pragma: no cover
             return [args.remote_path, 'serve'] + opts

@@ -82,6 +82,7 @@ def cmd(request):
         exe = 'borg.exe'
     else:
         raise ValueError("param must be 'python' or 'binary'")
+
     def exec_fn(*args, **kw):
         return exec_cmd(*args, exe=exe, fork=True, **kw)
     return exec_fn

@@ -121,6 +122,7 @@ if the directory does not exist, the test will be skipped.
 """
 DF_MOUNT = '/tmp/borg-mount'

+
 @pytest.mark.skipif(not os.path.exists(DF_MOUNT), reason="needs a 16MB fs mounted on %s" % DF_MOUNT)
 def test_disk_full(cmd):
     def make_files(dir, count, size, rnd=True):

@@ -177,7 +179,7 @@ def test_disk_full(cmd):
             shutil.rmtree(reserve, ignore_errors=True)
             rc, out = cmd('list', repo)
             if rc != EXIT_SUCCESS:
                 print('list', rc, out)
             rc, out = cmd('check', '--repair', repo)
             if rc != EXIT_SUCCESS:
                 print('check', rc, out)

@@ -301,7 +303,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         list_output = self.cmd('list', '--short', self.repository_location)
         self.assert_in('test', list_output)
         self.assert_in('test.2', list_output)
         expected = [
             'input',
             'input/bdev',
             'input/cdev',

@@ -320,7 +322,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         expected.remove('input/cdev')
         if has_lchflags:
             # remove the file we did not backup, so input and output become equal
-            expected.remove('input/flagfile') # this file is UF_NODUMP
+            expected.remove('input/flagfile')  # this file is UF_NODUMP
         os.remove(os.path.join('input', 'flagfile'))
         list_output = self.cmd('list', '--short', self.repository_location + '::test')
         for name in expected:

@@ -348,7 +350,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.assert_equal(filter(info_output), filter(info_output2))

     def test_atime(self):
-        have_root = self.create_test_files()
+        self.create_test_files()
         atime, mtime = 123456780, 234567890
         os.utime('input/file1', (atime, mtime))
         self.cmd('init', self.repository_location)

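The dropped "have_root =" binding fixes pyflakes F841 (local variable assigned but never used); the same motive is behind the "with open(filename, 'wb'):" change in the next hunk, where the "as fd" handle was never used either.
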
@@ -414,7 +416,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         filenames = ['normal', 'with some blanks', '(with_parens)', ]
         for filename in filenames:
             filename = os.path.join(self.input_path, filename)
-            with open(filename, 'wb') as fd:
+            with open(filename, 'wb'):
                 pass
         self.cmd('init', self.repository_location)
         self.cmd('create', self.repository_location + '::test', 'input')

@@ -617,11 +619,11 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         self.create_regular_file('tagged1/file1', size=1024)
         self.create_regular_file('tagged2/.NOBACKUP2')
         self.create_regular_file('tagged2/file2', size=1024)
-        self.create_regular_file('tagged3/CACHEDIR.TAG', contents = b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('tagged3/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
         self.create_regular_file('tagged3/file3', size=1024)
         self.create_regular_file('taggedall/.NOBACKUP1')
         self.create_regular_file('taggedall/.NOBACKUP2')
-        self.create_regular_file('taggedall/CACHEDIR.TAG', contents = b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('taggedall/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
         self.create_regular_file('taggedall/file4', size=1024)
         self.cmd('create', '--exclude-if-present', '.NOBACKUP1', '--exclude-if-present', '.NOBACKUP2',
                  '--exclude-caches', '--keep-tag-files', self.repository_location + '::test', 'input')

@@ -785,7 +787,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         clearly incomplete: only tests for the weird "unchanged" status for now"""
         now = time.time()
         self.create_regular_file('file1', size=1024 * 80)
-        os.utime('input/file1', (now - 5, now - 5)) # 5 seconds ago
+        os.utime('input/file1', (now - 5, now - 5))  # 5 seconds ago
         self.create_regular_file('file2', size=1024 * 80)
         self.cmd('init', self.repository_location)
         output = self.cmd('create', '-v', '--list', self.repository_location + '::test', 'input')

@@ -822,7 +824,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         output = self.cmd('create', '-v', '--list', '--filter=AM', self.repository_location + '::test3', 'input')
         self.assert_in('file1', output)

-    #def test_cmdline_compatibility(self):
+    # def test_cmdline_compatibility(self):
     #     self.create_regular_file('file1', size=1024 * 80)
     #     self.cmd('init', self.repository_location)
     #     self.cmd('create', self.repository_location + '::test', 'input')

@@ -38,12 +38,14 @@ def testdata(request, tmpdir_factory):
     data_type = request.param
     if data_type == 'zeros':
         # do not use a binary zero (\0) to avoid sparse detection
-        data = lambda: b'0' * size
+        def data(size):
+            return b'0' * size
     if data_type == 'random':
-        data = lambda: os.urandom(size)
+        def data(size):
+            return os.urandom(size)
     for i in range(count):
         with open(str(p.join(str(i))), "wb") as f:
-            f.write(data())
+            f.write(data(size))
     yield str(p)
     p.remove(rec=1)

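The lambda-to-def rewrite addresses flake8's E731 (do not assign a lambda to a name); as a side effect, data now takes size explicitly instead of closing over it, so the call site becomes data(size). A standalone sketch of the difference (illustrative names):

    f = lambda size: b'0' * size      # E731: an assigned lambda is named '<lambda>'

    def g(size):                      # preferred: a def carries its own name
        return b'0' * size

    assert f(4) == g(4) == b'0000'
    assert f.__name__ == '<lambda>' and g.__name__ == 'g'
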
@@ -95,4 +97,3 @@ def test_check(benchmark, cmd, archive):
 def test_help(benchmark, cmd):
     result, out = benchmark(cmd, 'help')
     assert result == 0
-

@@ -98,5 +98,3 @@ def test_compressor():
     for params in params_list:
         c = Compressor(**params)
         assert data == c.decompress(c.compress(data))
-
-

@@ -165,8 +165,7 @@ class FormatTimedeltaTestCase(BaseTestCase):
 def check_patterns(files, pattern, expected):
     """Utility for testing patterns.
     """
-    assert all([f == os.path.normpath(f) for f in files]), \
-        "Pattern matchers expect normalized input paths"
+    assert all([f == os.path.normpath(f) for f in files]), "Pattern matchers expect normalized input paths"

     matched = [f for f in files if pattern.match(f)]

@@ -284,7 +283,7 @@ def test_patterns_shell(pattern, expected):
     ("^[^/]", []),
     ("^(?!/srv|/foo|/opt)",
      ["/home", "/home/user/.profile", "/home/user/.bashrc", "/home/user2/.profile",
-      "/home/user2/public_html/index.html", "/home/foo/.thumbnails", "/home/foo/bar/.thumbnails",]),
+      "/home/user2/public_html/index.html", "/home/foo/.thumbnails", "/home/foo/bar/.thumbnails", ]),
 ])
 def test_patterns_regex(pattern, expected):
     files = [

@@ -9,6 +9,7 @@ from ..locking import get_id, TimeoutTimer, ExclusiveLock, UpgradableLock, LockR
 ID1 = "foo", 1, 1
 ID2 = "bar", 2, 2

+
 def test_id():
     hostname, pid, tid = get_id()
     assert isinstance(hostname, str)

@@ -338,7 +338,7 @@ class RemoteRepositoryTestCase(RepositoryTestCase):
             remote_path = 'borg'
             umask = 0o077

-        assert self.repository.borg_cmd(None, testing=True) == [sys.executable, '-m', 'borg.archiver', 'serve' ]
+        assert self.repository.borg_cmd(None, testing=True) == [sys.executable, '-m', 'borg.archiver', 'serve']
         args = MockArgs()
         # note: test logger is on info log level, so --info gets added automagically
         assert self.repository.borg_cmd(args, testing=False) == ['borg', 'serve', '--umask=077', '--info']

@@ -321,6 +321,6 @@ class Borg0xxKeyfileKey(KeyfileKey):
             filename = os.path.join(keys_dir, name)
             with open(filename, 'r') as fd:
                 line = fd.readline().strip()
-                if line and line.startswith(cls.FILE_ID) and line[len(cls.FILE_ID)+1:] == id:
+                if line and line.startswith(cls.FILE_ID) and line[len(cls.FILE_ID) + 1:] == id:
                     return filename
         raise KeyfileNotFoundError(repository.path, keys_dir)

@@ -231,8 +231,8 @@ elif sys.platform.startswith('freebsd'):  # pragma: freebsd only
         mv = memoryview(namebuf.raw)
         while mv:
             length = mv[0]
-            names.append(os.fsdecode(bytes(mv[1:1+length])))
-            mv = mv[1+length:]
+            names.append(os.fsdecode(bytes(mv[1:1 + length])))
+            mv = mv[1 + length:]
         return names

     def getxattr(path, name, *, follow_symlinks=True):

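The buffer being walked here holds length-prefixed names (one length byte, then that many name bytes), which is how the FreeBSD extattr list calls return attribute names. The same loop as a standalone sketch with made-up buffer contents:

    import os

    buf = bytes([4]) + b'user' + bytes([3]) + b'foo'
    names, mv = [], memoryview(buf)
    while mv:
        length = mv[0]                 # first byte: length of the next name
        names.append(os.fsdecode(bytes(mv[1:1 + length])))
        mv = mv[1 + length:]
    assert names == ['user', 'foo']
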
setup.cfg (10 changed lines)

@@ -2,5 +2,11 @@
 python_files = testsuite/*.py

 [flake8]
-max-line-length = 120
-exclude = build,dist,.git,.idea,.cache,.tox
+# please note that the values are adjusted so that they do not cause failures
+# with existing code. if you want to change them, you should first fix all
+# flake8 failures that appear with your change.
+ignore = E122,E123,E125,E126,E127,E128,E226,E402,F401,F811
+# line length long term target: 120
+max-line-length = 255
+exclude = build,dist,.git,.idea,.cache,.tox,docs/conf.py

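For orientation, the ignored codes are (as pycodestyle/pyflakes document them): E122, E123, E125, E126, E127 and E128 are continuation-line indentation rules, E226 is missing whitespace around arithmetic operators, E402 is a module-level import not at the top of the file, F401 is an imported but unused name, and F811 is redefinition of an unused name. These are the complaint classes this commit deliberately leaves in place.
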
setup.py (5 changed lines)

@@ -19,7 +19,7 @@ on_rtd = os.environ.get('READTHEDOCS')

 # msgpack pure python data corruption was fixed in 0.4.6.
 # Also, we might use some rather recent API features.
-install_requires=['msgpack-python>=0.4.6', ]
+install_requires = ['msgpack-python>=0.4.6', ]


 from setuptools import setup, Extension

@@ -120,12 +120,14 @@ elif not on_rtd:
     with open('README.rst', 'r') as fd:
         long_description = fd.read()

+
+
 class build_usage(Command):
     description = "generate usage for each command"

     user_options = [
         ('output=', 'O', 'output directory'),
     ]

     def initialize_options(self):
         pass

@@ -172,6 +174,7 @@ class build_api(Command):
     user_options = [
         ('output=', 'O', 'output directory'),
     ]
+
     def initialize_options(self):
         pass

tox.ini (7 changed lines)

@@ -2,7 +2,7 @@
 # fakeroot -u tox --recreate

 [tox]
-envlist = py{34,35}
+envlist = py{34,35},flake8

 [testenv]
 # Change dir to avoid import problem for cython code. The directory does

@@ -14,3 +14,8 @@ deps =
 commands = py.test --cov=borg --cov-config=../.coveragerc --benchmark-skip --pyargs {posargs:borg.testsuite}
 # fakeroot -u needs some env vars:
 passenv = *
+
+[testenv:flake8]
+changedir =
+deps = flake8
+commands = flake8

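With this environment in place, the style checks run standalone via "tox -e flake8", and the widened envlist above means a plain "tox" run covers them too. The empty "changedir =" apparently resets the [testenv] default so flake8 runs from the repository root, where the setup.cfg configuration lives.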