mirror of https://github.com/borgbackup/borg.git
commit ddb8839076

@@ -1 +1,3 @@
 borg/_version.py export-subst
+
+*.py diff=python

@@ -18,7 +18,7 @@ if [[ "$(uname -s)" == 'Darwin' ]]; then
     brew install xz  # required for python lzma module
     brew outdated pyenv || brew upgrade pyenv
     brew install pkg-config
-    brew install Caskroom/versions/osxfuse-beta
+    brew install Caskroom/versions/osxfuse
 
     case "${TOXENV}" in
         py34)

@@ -214,6 +214,16 @@ If you encounter issues, see also our `Vagrantfile` for details.
 without external dependencies.
 
+
+Merging maintenance branches
+----------------------------
+
+As mentioned above, bug fixes will usually be merged into a maintenance branch (x.y-maint) and then
+merged back into the master branch. Large diffs between these branches can make automatic merges troublesome,
+therefore we recommend using these merge parameters::
+
+    git merge 1.0-maint -s recursive -X rename-threshold=20%
+
 
 Creating a new release
 ----------------------
 

@@ -994,6 +994,9 @@ class ArchiveChecker:
         self.repair = repair
         self.repository = repository
         self.init_chunks()
+        if not self.chunks:
+            logger.error('Repository contains no apparent data at all, cannot continue check/repair.')
+            return False
         self.key = self.identify_key(repository)
         if verify_data:
             self.verify_data()

@@ -1260,11 +1263,21 @@ class ArchiveChecker:
                 self.error_found = True
                 logger.error(msg)
 
+            def list_keys_safe(keys):
+                return ', '.join((k.decode() if isinstance(k, bytes) else str(k) for k in keys))
+
             def valid_item(obj):
                 if not isinstance(obj, StableDict):
-                    return False
+                    return False, 'not a dictionary'
+                # A bug in Attic up to and including release 0.13 added a (meaningless) b'acl' key to every item.
+                # We ignore it here, should it exist. See test_attic013_acl_bug for details.
+                obj.pop(b'acl', None)
                 keys = set(obj)
-                return required_item_keys.issubset(keys) and keys.issubset(item_keys)
+                if not required_item_keys.issubset(keys):
+                    return False, 'missing required keys: ' + list_keys_safe(required_item_keys - keys)
+                if not keys.issubset(item_keys):
+                    return False, 'invalid keys: ' + list_keys_safe(keys - item_keys)
+                return True, ''
 
             i = 0
             for state, items in groupby(archive.items, missing_chunk_detector):

@@ -1281,10 +1294,11 @@ class ArchiveChecker:
                     unpacker.feed(data)
                     try:
                         for item in unpacker:
-                            if valid_item(item):
+                            valid, reason = valid_item(item)
+                            if valid:
                                 yield Item(internal_dict=item)
                             else:
-                                report('Did not get expected metadata dict when unpacking item metadata', chunk_id, i)
+                                report('Did not get expected metadata dict when unpacking item metadata (%s)' % reason, chunk_id, i)
                     except RobustUnpacker.UnpackerCrashed as err:
                         report('Unpacker crashed while unpacking item metadata, trying to resync...', chunk_id, i)
                         unpacker.resync()

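The two hunks above change valid_item() from returning a bare boolean to returning a (valid, reason) tuple, so the caller can put the concrete failure reason into its error report. A minimal, self-contained sketch of that pattern follows; the key sets and sample items below are made up for illustration and are not borg's real data::

    # Illustrative stand-ins for borg's key sets (values are made up).
    required_item_keys = frozenset({b'path', b'mtime'})
    item_keys = required_item_keys | {b'mode', b'user', b'group', b'uid', b'gid'}

    def list_keys_safe(keys):
        # keys may be bytes (from msgpack) or str; render them safely for a log message
        return ', '.join(k.decode() if isinstance(k, bytes) else str(k) for k in keys)

    def valid_item(obj):
        if not isinstance(obj, dict):          # borg checks for StableDict here
            return False, 'not a dictionary'
        obj.pop(b'acl', None)                  # ignore the Attic <= 0.13 artifact
        keys = set(obj)
        if not required_item_keys.issubset(keys):
            return False, 'missing required keys: ' + list_keys_safe(required_item_keys - keys)
        if not keys.issubset(item_keys):
            return False, 'invalid keys: ' + list_keys_safe(keys - item_keys)
        return True, ''

    for item in ({b'path': b'/etc/hosts', b'mtime': 0},    # valid
                 {b'path': b'/tmp/x', b'bogus': 1}):        # missing b'mtime'
        valid, reason = valid_item(item)
        if not valid:
            print('Did not get expected metadata dict (%s)' % reason)
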
@@ -2203,6 +2203,8 @@ class Archiver:
         considered for deletion and only those archives count towards the totals
         specified by the rules.
         Otherwise, *all* archives in the repository are candidates for deletion!
+        There is no automatic distinction between archives representing different
+        contents. These need to be distinguished by specifying matching prefixes.
 
         If you have multiple sequences of archives with different data sets (e.g.
         from different machines) in one shared repository, use one prune call per

@@ -16,6 +16,13 @@ class PropDict:
     - optionally, decode when getting a value
     - be safe against typos in key names: check against VALID_KEYS
     - when setting a value: check type of value
+
+    When "packing" a dict, i.e. you have a dict with some data and want to convert it into an instance,
+    then use e.g. Item({'a': 1, ...}). This way, all keys in your dictionary are validated.
+
+    When "unpacking", that is, you've read a dictionary with some data from somewhere (e.g. msgpack),
+    then use e.g. Item(internal_dict={...}). This does not validate the keys, therefore unknown keys
+    are ignored instead of causing an error.
     """
     VALID_KEYS = None  # override with <set of str> in child class
 

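The docstring added above distinguishes two construction paths. A toy, self-contained sketch of that behaviour follows; this is not borg's actual PropDict implementation, only an illustration of the pack/unpack split::

    class ToyPropDict:
        """Toy model of the pack/unpack split described in the docstring above."""
        VALID_KEYS = {'path', 'mode'}   # a child class would override this

        def __init__(self, data_dict=None, internal_dict=None, **kw):
            self._dict = {}
            # "unpacking": trust the data, silently ignore unknown keys
            for k, v in (internal_dict or {}).items():
                if k in self.VALID_KEYS:
                    self._dict[k] = v
            # "packing": validate every key, so typos raise immediately
            for k, v in dict(data_dict or {}, **kw).items():
                if k not in self.VALID_KEYS:
                    raise ValueError('invalid key: %s' % k)
                self._dict[k] = v

    ToyPropDict({'path': '/etc/hosts'})                     # ok, keys validated
    ToyPropDict(internal_dict={'path': '/x', 'junk': 1})    # 'junk' silently ignored
    try:
        ToyPropDict({'junk': 1})                            # rejected
    except ValueError as exc:
        print(exc)
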
@@ -112,11 +119,14 @@ class Item(PropDict):
     Items are created either from msgpack unpacker output, from another dict, from kwargs or
     built step-by-step by setting attributes.
 
-    msgpack gives us a dict with bytes-typed keys, just give it to Item(d) and use item.key_name later.
+    msgpack gives us a dict with bytes-typed keys, just give it to Item(internal_dict=d) and use item.key_name later.
     msgpack gives us byte-typed values for stuff that should be str, we automatically decode when getting
     such a property and encode when setting it.
 
     If an Item shall be serialized, give as_dict() method output to msgpack packer.
+
+    A bug in Attic up to and including release 0.13 added a (meaningless) 'acl' key to every item.
+    We must never re-use this key. See test_attic013_acl_bug for details.
     """
 
     VALID_KEYS = ITEM_KEYS | {'deleted', 'nlink', }  # str-typed keys

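Assuming the Item API as described in the docstring above (bytes-typed keys from msgpack, str-typed properties, as_dict() output for the packer), a round trip would look roughly like this. This is a sketch, not code from the borg source; the exact msgpack options and accepted keys may differ::

    import msgpack

    from borg.item import Item

    # "packing": build an Item from validated, str-typed keys/values
    item = Item(path='/etc/hosts', mode=0o100644)
    packed = msgpack.packb(item.as_dict(), use_bin_type=False)

    # "unpacking": msgpack hands back bytes-typed keys, validation is skipped
    item2 = Item(internal_dict=msgpack.unpackb(packed))
    print(item2.path)   # byte-typed value decoded to str on property access
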
@@ -683,6 +683,9 @@ class Repository:
         if transaction_id is None:
             logger.debug('No index transaction found, trying latest segment')
             transaction_id = self.io.get_latest_segment()
+        if transaction_id is None:
+            report_error('This repository contains no valid data.')
+            return False
         if repair:
             self.io.cleanup(transaction_id)
         segments_transaction_id = self.io.get_segments_transaction_id()

@@ -2154,6 +2154,42 @@ class ArchiverCheckTestCase(ArchiverTestCaseBase):
     def test_verify_data_unencrypted(self):
         self._test_verify_data('--encryption', 'none')
 
+    def test_empty_repository(self):
+        with Repository(self.repository_location, exclusive=True) as repository:
+            for id_ in repository.list():
+                repository.delete(id_)
+            repository.commit()
+        self.cmd('check', self.repository_location, exit_code=1)
+
+    def test_attic013_acl_bug(self):
+        # Attic up to release 0.13 contained a bug where every item unintentionally received
+        # a b'acl'=None key-value pair.
+        # This bug can still live on in Borg repositories (through borg upgrade).
+        class Attic013Item:
+            def as_dict():
+                return {
+                    # These are required
+                    b'path': '1234',
+                    b'mtime': 0,
+                    b'mode': 0,
+                    b'user': b'0',
+                    b'group': b'0',
+                    b'uid': 0,
+                    b'gid': 0,
+                    # acl is the offending key.
+                    b'acl': None,
+                }
+
+        archive, repository = self.open_archive('archive1')
+        with repository:
+            manifest, key = Manifest.load(repository)
+            with Cache(repository, key, manifest) as cache:
+                archive = Archive(repository, key, manifest, '0.13', cache=cache, create=True)
+                archive.items_buffer.add(Attic013Item)
+                archive.save()
+        self.cmd('check', self.repository_location, exit_code=0)
+        self.cmd('list', self.repository_location + '::0.13', exit_code=0)
+
 
 
 @pytest.mark.skipif(sys.platform == 'cygwin', reason='remote is broken on cygwin and hangs')
 class RemoteArchiverTestCase(ArchiverTestCase):

@@ -12,9 +12,6 @@ from distutils.version import LooseVersion
 
 from .helpers import Buffer
 
-from .logger import create_logger
-logger = create_logger()
-
 
 try:
     ENOATTR = errno.ENOATTR

@@ -68,7 +65,7 @@ if libc_name is None:
         libc_name = 'libc.dylib'
     else:
         msg = "Can't find C library. No fallback known. Try installing ldconfig, gcc/cc or objdump."
-        logger.error(msg)
+        print(msg, file=sys.stderr)  # logger isn't initialized at this stage
         raise Exception(msg)
 
 # If we are running with fakeroot on Linux, then use the xattr functions of fakeroot. This is needed by