
cleanup pep8 issues found by pycharm

Thomas Waldmann 2015-03-17 23:47:21 +01:00
parent 3a33d57c7d
commit 4ef6125f45
14 changed files with 47 additions and 33 deletions
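
Most of the hunks below apply a handful of recurring PEP 8 fixes: negated membership tests rewritten from "not x in y" to "x not in y", whitespace added after commas, spaces dropped around keyword-argument equals signs, two spaces placed before inline comments, compound "if ...: break" statements split over two lines, and blank lines added before nested and top-level definitions. A condensed sketch of those patterns in isolation (the names below are illustrative, not taken from the commit):

    def pep8_patterns(index, keep, n):
        # membership test: "key not in index" instead of "not key in index"
        missing = "chunk" not in index

        # whitespace after commas, no spaces around keyword-argument equals
        pairs = [(1, 1), (2, 28), (3, 31)]
        tag = dict(contents=b"Signature: ...")

        # at least two spaces before an inline comment
        result = {}  # unexpanded, so nothing to report

        # compound statement split over two lines instead of "if ...: break"
        for _ in keep:
            if len(keep) == n:
                break

        return missing, pairs, tag, result

    print(pep8_patterns({"chunk": 1}, ["a", "b"], 2))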

View file

@@ -17,6 +17,7 @@
import subprocess
import sys
def run_command(args, cwd=None, verbose=False):
try:
# remember shell=False, so use git.cmd on windows, not just git
@@ -41,6 +42,7 @@ def run_command(args, cwd=None, verbose=False):
import re
import os.path
def get_expanded_variables(versionfile_source):
# the code embedded in _version.py can just fetch the value of these
# variables. When used from setup.py, we don't want to import
@@ -48,7 +50,7 @@ def get_expanded_variables(versionfile_source):
# used from _version.py.
variables = {}
try:
for line in open(versionfile_source,"r").readlines():
for line in open(versionfile_source, "r").readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
@@ -61,12 +63,13 @@ def get_expanded_variables(versionfile_source):
pass
return variables
def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
refnames = variables["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("variables are unexpanded, not using")
return {} # unexpanded, so not in an unpacked git-archive tarball
return {} # unexpanded, so not in an unpacked git-archive tarball
refs = set([r.strip() for r in refnames.strip("()").split(",")])
for ref in list(refs):
if not re.search(r'\d', ref):
@@ -87,13 +90,14 @@ def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return { "version": r,
"full": variables["full"].strip() }
return {"version": r,
"full": variables["full"].strip()}
# no suitable tags, so we use the full revision id
if verbose:
print("no suitable tags, using full revision id")
return { "version": variables["full"].strip(),
"full": variables["full"].strip() }
return {"version": variables["full"].strip(),
"full": variables["full"].strip()}
def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
# this runs 'git' from the root of the source tree. That either means
@@ -110,7 +114,7 @@ def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
here = os.path.abspath(__file__)
except NameError:
# some py2exe/bbfreeze/non-CPython implementations don't do __file__
return {} # not always correct
return {} # not always correct
# versionfile_source is the relative path from the top of the source tree
# (where the .git directory might live) to this file. Invert this to find
@@ -157,7 +161,7 @@ def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False)
here = os.path.abspath(__file__)
except NameError:
# py2exe/bbfreeze/non-CPython don't have __file__
return {} # without __file__, we have no hope
return {} # without __file__, we have no hope
# versionfile_source is the relative path from the top of the source
# tree to _version.py. Invert this to find the root from __file__.
root = here
@@ -183,8 +187,9 @@ def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False)
parentdir_prefix = "Attic-"
versionfile_source = "attic/_version.py"
def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
variables = { "refnames": git_refnames, "full": git_full }
variables = {"refnames": git_refnames, "full": git_full}
ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
if not ver:
ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
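
For context on the version-detection hunks above: the expanded git_refnames string comes from a git-archive keyword substitution, and versions_from_expanded_variables() picks a tag out of it or falls back to the full revision id. A simplified, self-contained sketch of that selection logic (pick_version and its arguments are illustrative names, not the module's API):

    import re

    def pick_version(refnames, full, tag_prefix=""):
        refnames = refnames.strip()
        if refnames.startswith("$Format"):
            return {}  # unexpanded, so not in an unpacked git-archive tarball
        refs = set(r.strip() for r in refnames.strip("()").split(","))
        for ref in sorted(refs):
            # skip refs without digits (HEAD, master, ...) and foreign prefixes
            if re.search(r"\d", ref) and ref.startswith(tag_prefix):
                return {"version": ref[len(tag_prefix):], "full": full.strip()}
        # no suitable tags, so use the full revision id
        return {"version": full.strip(), "full": full.strip()}

    print(pick_version("(HEAD, 0.14.0, master)", "4ef6125f45"))
    # {'version': '0.14.0', 'full': '4ef6125f45'}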

View file

@@ -141,7 +141,7 @@ def __init__(self, repository, key, manifest, name, cache=None, create=False,
i = 0
while True:
self.checkpoint_name = '%s.checkpoint%s' % (name, i and ('.%d' % i) or '')
if not self.checkpoint_name in manifest.archives:
if self.checkpoint_name not in manifest.archives:
break
i += 1
else:
@@ -211,6 +211,7 @@ def add(id):
count, size, csize = self.cache.chunks[id]
stats.update(size, csize, count == 1)
self.cache.chunks[id] = count - 1, size, csize
def add_file_chunks(chunks):
for id, _, _ in chunks:
add(id)
@@ -535,7 +536,7 @@ def check(self, repository, repair=False):
self.repository = repository
self.init_chunks()
self.key = self.identify_key(repository)
if not Manifest.MANIFEST_ID in self.chunks:
if Manifest.MANIFEST_ID not in self.chunks:
self.manifest = self.rebuild_manifest()
else:
self.manifest, _ = Manifest.load(repository, key=self.key)
@@ -583,7 +584,7 @@ def rebuild_manifest(self):
# Some basic sanity checks of the payload before feeding it into msgpack
if len(data) < 2 or ((data[0] & 0xf0) != 0x80) or ((data[1] & 0xe0) != 0xa0):
continue
if not b'cmdline' in data or not b'\xa7version\x01' in data:
if b'cmdline' not in data or b'\xa7version\x01' not in data:
continue
try:
archive = msgpack.unpackb(data)
@@ -632,7 +633,7 @@ def verify_file_chunks(item):
offset = 0
chunk_list = []
for chunk_id, size, csize in item[b'chunks']:
if not chunk_id in self.chunks:
if chunk_id not in self.chunks:
# If a file chunk is missing, create an all empty replacement chunk
self.report_progress('{}: Missing file chunk detected (Byte {}-{})'.format(item[b'path'].decode('utf-8', 'surrogateescape'), offset, offset + size), error=True)
data = bytes(size)
@@ -653,11 +654,13 @@ def robust_iterator(archive):
"""
unpacker = RobustUnpacker(lambda item: isinstance(item, dict) and b'path' in item)
_state = 0
def missing_chunk_detector(chunk_id):
nonlocal _state
if _state % 2 != int(not chunk_id in self.chunks):
if _state % 2 != int(chunk_id not in self.chunks):
_state += 1
return _state
for state, items in groupby(archive[b'items'], missing_chunk_detector):
items = list(items)
if state % 2:
@@ -675,7 +678,7 @@ def missing_chunk_detector(chunk_id):
for i, (name, info) in enumerate(list(self.manifest.archives.items()), 1):
self.report_progress('Analyzing archive {} ({}/{})'.format(name, i, num_archives))
archive_id = info[b'id']
if not archive_id in self.chunks:
if archive_id not in self.chunks:
self.report_progress('Archive metadata block is missing', error=True)
del self.manifest.archives[name]
continue
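
The robust_iterator change above relies on a small pattern: a nested detector increments a nonlocal counter every time chunk presence flips, so itertools.groupby splits the item stream into consecutive runs whose metadata chunks are either all present or all missing. A minimal standalone sketch of that pattern (group_by_presence and its arguments are illustrative names, not from the commit):

    from itertools import groupby

    def group_by_presence(ids, present):
        _state = 0

        def detector(chunk_id):
            nonlocal _state
            # bump the counter whenever presence flips, so groupby starts a new run
            if _state % 2 != int(chunk_id not in present):
                _state += 1
            return _state

        for state, run in groupby(ids, detector):
            yield state % 2 == 1, list(run)  # True marks a run of missing ids

    print(list(group_by_presence([1, 2, 3, 4], {1, 4})))
    # [(False, [1]), (True, [2, 3]), (False, [4])]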

View file

@@ -290,9 +290,10 @@ def do_list(self, args):
extra = ' link to %s' % item[b'source']
else:
extra = ''
print('%s%s %-6s %-6s %8d %s %s%s' % (type, mode, item[b'user'] or item[b'uid'],
item[b'group'] or item[b'gid'], size, mtime,
remove_surrogates(item[b'path']), extra))
print('%s%s %-6s %-6s %8d %s %s%s' % (
type, mode, item[b'user'] or item[b'uid'],
item[b'group'] or item[b'gid'], size, mtime,
remove_surrogates(item[b'path']), extra))
else:
for archive in sorted(Archive.list_archives(repository, key, manifest), key=attrgetter('ts')):
print(format_archive(archive))
@@ -445,8 +446,8 @@ def run(self, args=None):
""").lstrip())
common_parser = argparse.ArgumentParser(add_help=False)
common_parser.add_argument('-v', '--verbose', dest='verbose', action='store_true',
default=False,
help='verbose output')
default=False,
help='verbose output')
# We can't use argparse for "serve" since we don't want it to show up in "Available commands"
if args:

View file

@@ -102,7 +102,7 @@ def load(cls, repository, key=None):
m = msgpack.unpackb(data)
if not m.get(b'version') == 1:
raise ValueError('Invalid manifest version')
manifest.archives = dict((k.decode('utf-8'), v) for k,v in m[b'archives'].items())
manifest.archives = dict((k.decode('utf-8'), v) for k, v in m[b'archives'].items())
manifest.timestamp = m.get(b'timestamp')
if manifest.timestamp:
manifest.timestamp = manifest.timestamp.decode('ascii')
@@ -145,7 +145,8 @@ def prune_split(archives, pattern, n, skip=[]):
last = period
if a not in skip:
keep.append(a)
if len(keep) == n: break
if len(keep) == n:
break
return keep

View file

@@ -17,6 +17,7 @@ class UnsupportedPayloadError(Error):
"""Unsupported payload type {}. A newer version is required to access this repository.
"""
class HMAC(hmac.HMAC):
"""Workaround a bug in Python < 3.4 Where HMAC does not accept memoryviews
"""

View file

@@ -13,5 +13,6 @@
def acl_get(path, item, numeric_owner=False):
pass
def acl_set(path, item, numeric_owner=False):
pass

View file

@@ -130,6 +130,7 @@ def call(self, cmd, *args, **kw):
def call_many(self, cmd, calls, wait=True, is_preloaded=False):
if not calls:
return
def fetch_from_cache(args):
msgid = self.cache[args].pop(0)
if not self.cache[args]:
@@ -293,7 +294,7 @@ def get(self, key):
return next(self.get_many([key]))
def get_many(self, keys):
unknown_keys = [key for key in keys if not key in self.index]
unknown_keys = [key for key in keys if key not in self.index]
repository_iterator = zip(unknown_keys, self.repository.get_many(unknown_keys))
for key in keys:
try:

View file

@@ -107,7 +107,7 @@ def open(self, path, exclusive):
raise self.DoesNotExist(path)
self.config = RawConfigParser()
self.config.read(os.path.join(self.path, 'config'))
if not 'repository' in self.config.sections() or self.config.getint('repository', 'version') != 1:
if 'repository' not in self.config.sections() or self.config.getint('repository', 'version') != 1:
raise self.InvalidRepository(path)
self.lock = UpgradableLock(os.path.join(path, 'config'), exclusive)
self.max_segment_size = self.config.getint('repository', 'max_segment_size')
@@ -241,6 +241,7 @@ def check(self, repair=False):
the index is consistent with the data stored in the segments.
"""
error_found = False
def report_error(msg):
nonlocal error_found
error_found = True

View file

@@ -200,8 +200,8 @@ def test_extract_include_exclude(self):
def test_exclude_caches(self):
self.attic('init', self.repository_location)
self.create_regular_file('file1', size=1024 * 80)
self.create_regular_file('cache1/CACHEDIR.TAG', contents = b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
self.create_regular_file('cache2/CACHEDIR.TAG', contents = b'invalid signature')
self.create_regular_file('cache1/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
self.create_regular_file('cache2/CACHEDIR.TAG', contents=b'invalid signature')
self.attic('create', '--exclude-caches', self.repository_location + '::test', 'input')
with changedir('output'):
self.attic('extract', self.repository_location + '::test')
@@ -356,7 +356,7 @@ def verify_uniqueness():
for key, _ in repository.open_index(repository.get_transaction_id()).iteritems():
data = repository.get(key)
hash = sha256(data).digest()
if not hash in seen:
if hash not in seen:
seen.add(hash)
num_blocks = num_aes_blocks(len(data) - 41)
nonce = bytes_to_long(data[33:41])

View file

@@ -14,11 +14,11 @@ def test_bytes_to_long(self):
def test_pbkdf2_sha256(self):
self.assert_equal(hexlify(pbkdf2_sha256(b'password', b'salt', 1, 32)),
b'120fb6cffcf8b32c43e7225256c4f837a86548c92ccc35480805987cb70be17b')
b'120fb6cffcf8b32c43e7225256c4f837a86548c92ccc35480805987cb70be17b')
self.assert_equal(hexlify(pbkdf2_sha256(b'password', b'salt', 2, 32)),
b'ae4d0c95af6b46d32d0adff928f06dd02a303f8ef3c251dfd6e2d85a95474c43')
b'ae4d0c95af6b46d32d0adff928f06dd02a303f8ef3c251dfd6e2d85a95474c43')
self.assert_equal(hexlify(pbkdf2_sha256(b'password', b'salt', 4096, 32)),
b'c5e478d59288c841aa530db6845c4c8d962893a001ce4e11a4963873aa98134a')
b'c5e478d59288c841aa530db6845c4c8d962893a001ce4e11a4963873aa98134a')
def test_get_random_bytes(self):
bytes = get_random_bytes(10)

View file

@@ -151,7 +151,7 @@ def dotest(test_archives, n, skip, indices):
self.assert_equal(set(prune_split(ta, '%Y-%m', n, skip)),
subset(test_archives, indices))
test_pairs = [(1,1), (2,1), (2,28), (3,1), (3,2), (3,31), (5,1)]
test_pairs = [(1, 1), (2, 1), (2, 28), (3, 1), (3, 2), (3, 31), (5, 1)]
test_dates = [local_to_UTC(month, day) for month, day in test_pairs]
test_archives = [MockArchive(date) for date in test_dates]

View file

@@ -159,7 +159,6 @@ def test_replay_of_readonly_repository(self):
self.assert_raises(UpgradableLock.WriteLockFailed, lambda: len(self.repository))
upgrade.assert_called_once()
def test_crash_before_write_index(self):
self.add_keys()
self.repository.write_index = None

View file

@@ -4,6 +4,7 @@
from attic.testsuite import AtticTestCase
from attic.xattr import is_enabled, getxattr, setxattr, listxattr
@unittest.skipUnless(is_enabled(), 'xattr not enabled on filesystem')
class XattrTestCase(AtticTestCase):

View file

@@ -7,7 +7,7 @@
versioneer.versionfile_source = 'attic/_version.py'
versioneer.versionfile_build = 'attic/_version.py'
versioneer.tag_prefix = ''
versioneer.parentdir_prefix = 'Attic-' # dirname like 'myproject-1.2.0'
versioneer.parentdir_prefix = 'Attic-' # dirname like 'myproject-1.2.0'
platform = os.uname()[0]