borgbackup/borg (mirror of https://github.com/borgbackup/borg.git)

move some constants to new constants module

commit 9a70a20370
parent 30f732052e
Author: Thomas Waldmann
Date: 2016-04-17 03:15:19 +02:00

9 changed files with 81 additions and 71 deletions
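The change is a mechanical refactoring: constants that used to live in `archive.py`, `archiver.py`, `helpers.py`, and `repository.py` move into one `constants` module, and every consumer pulls them back in with `from .constants import *  # NOQA` (the NOQA marker keeps flake8 quiet about the star import). A minimal sketch of the pattern, with hypothetical names that are not part of this commit:

```python
# constants.py: the single authoritative home for shared values
DEFAULT_BUFFER_SIZE = 1024 * 1024  # hypothetical example constant
MAGIC = b'EXAMPLE1'                # hypothetical example constant

# consumer.py: the star import keeps every call site unchanged after the move
from constants import *  # NOQA

def make_buffer():
    # DEFAULT_BUFFER_SIZE resolves exactly as it did when defined locally
    return bytearray(DEFAULT_BUFFER_SIZE)
```

The payoff shows up in the diffs below: duplicated literals such as the CACHEDIR.TAG signature, which appeared in several places, now have exactly one definition.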

borg/archive.py

@@ -17,10 +17,11 @@
 from io import BytesIO
 from . import xattr
 from .compress import Compressor, COMPR_BUFFER
+from .constants import *  # NOQA
 from .helpers import Error, uid2user, user2uid, gid2group, group2gid, \
     parse_timestamp, to_localtime, format_time, format_timedelta, \
     Manifest, Statistics, decode_dict, make_path_safe, StableDict, int_to_bigint, bigint_to_int, \
-    ProgressIndicatorPercent, ChunkIteratorFileWrapper, remove_surrogates, log_multi, DASHES, \
+    ProgressIndicatorPercent, ChunkIteratorFileWrapper, remove_surrogates, log_multi, \
     PathPrefixPattern, FnmatchPattern, open_item, file_status, format_file_size, consume
 from .repository import Repository
 from .platform import acl_get, acl_set
@@ -29,19 +30,6 @@
 from .cache import ChunkListEntry
 import msgpack

 ITEMS_BUFFER = 1024 * 1024
-
-CHUNK_MIN_EXP = 19  # 2**19 == 512kiB
-CHUNK_MAX_EXP = 23  # 2**23 == 8MiB
-HASH_WINDOW_SIZE = 0xfff  # 4095B
-HASH_MASK_BITS = 21  # results in ~2MiB chunks statistically
-
-# defaults, use --chunker-params to override
-CHUNKER_PARAMS = (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE)
-
-# chunker params for the items metadata stream, finer granularity
-ITEMS_CHUNKER_PARAMS = (12, 16, 14, HASH_WINDOW_SIZE)
-
 has_lchmod = hasattr(os, 'lchmod')
 has_lchflags = hasattr(os, 'lchflags')
@@ -59,7 +47,7 @@ def unpack_many(self, ids, filter=None, preload=False):
         unpacker = msgpack.Unpacker(use_list=False)
         for data in self.fetch_many(ids):
             unpacker.feed(data)
-            items = [decode_dict(item, (b'path', b'source', b'user', b'group')) for item in unpacker]
+            items = [decode_dict(item, ITEM_TEXT_KEYS) for item in unpacker]
             if filter:
                 items = [item for item in items if filter(item)]
             for item in items:
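`decode_dict` is borg's helper for turning the text-valued fields of an unpacked msgpack item (bytes keys, bytes values) back into `str`; only the keys listed in the new `ITEM_TEXT_KEYS` tuple are touched. A rough standalone equivalent, for illustration only (the real helper lives in borg's helpers module):

```python
ITEM_TEXT_KEYS = (b'path', b'source', b'user', b'group')

def decode_dict_sketch(d, keys, encoding='utf-8', errors='surrogateescape'):
    # decode the values of the listed keys in place; leave binary fields alone
    for key in keys:
        if key in d and isinstance(d[key], bytes):
            d[key] = d[key].decode(encoding, errors)
    return d

item = {b'path': b'home/user/file.txt', b'mode': 0o100644}
decode_dict_sketch(item, ITEM_TEXT_KEYS)
assert item[b'path'] == 'home/user/file.txt'
```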
@@ -187,7 +175,7 @@ def _load_meta(self, id):
     def load(self, id):
         self.id = id
         self.metadata = self._load_meta(self.id)
-        decode_dict(self.metadata, (b'name', b'comment', b'hostname', b'username', b'time', b'time_end'))
+        decode_dict(self.metadata, ARCHIVE_TEXT_KEYS)
         self.metadata[b'cmdline'] = [arg.decode('utf-8', 'surrogateescape') for arg in self.metadata[b'cmdline']]
         self.name = self.metadata[b'name']
@@ -233,7 +221,7 @@ def iter_items(self, filter=None, preload=False):
     def add_item(self, item):
         unknown_keys = set(item) - ITEM_KEYS
-        assert not unknown_keys, ('unknown item metadata keys detected, please update ITEM_KEYS: %s',
+        assert not unknown_keys, ('unknown item metadata keys detected, please update constants.ITEM_KEYS: %s',
                                   ','.join(k.decode('ascii') for k in unknown_keys))
         if self.show_progress:
             self.stats.show_progress(item=item, dt=0.2)
@@ -631,12 +619,6 @@ def _open_rb(path):
     return os.open(path, flags_normal)

-# this set must be kept complete, otherwise the RobustUnpacker might malfunction:
-ITEM_KEYS = set([b'path', b'source', b'rdev', b'chunks', b'hardlink_master',
-                 b'mode', b'user', b'group', b'uid', b'gid', b'mtime', b'atime', b'ctime',
-                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended', ])
-
 class RobustUnpacker:
     """A restartable/robust version of the streaming msgpack unpacker
     """
@@ -894,7 +876,7 @@ def report(msg, chunk_id, chunk_no):
             archive = StableDict(msgpack.unpackb(data))
             if archive[b'version'] != 1:
                 raise Exception('Unknown archive metadata version')
-            decode_dict(archive, (b'name', b'comment', b'hostname', b'username', b'time', b'time_end'))
+            decode_dict(archive, ARCHIVE_TEXT_KEYS)
             archive[b'cmdline'] = [arg.decode('utf-8', 'surrogateescape') for arg in archive[b'cmdline']]
             items_buffer = ChunkBuffer(self.key)
             items_buffer.write_chunk = add_callback
@@ -1154,24 +1136,23 @@ def exclude(dir, tag_item):
         matcher = self.matcher
         tag_files = []
         tagged_dirs = []
-        # build hardlink masters, but only for paths ending in CACHEDIR.TAG, so we can read hard-linked CACHEDIR.TAGs
+        # build hardlink masters, but only for paths ending in CACHE_TAG_NAME, so we can read hard-linked TAGs
         cachedir_masters = {}

         for item in archive.iter_items(
-                filter=lambda item: item[b'path'].endswith('CACHEDIR.TAG') or matcher.match(item[b'path'])):
-            if item[b'path'].endswith('CACHEDIR.TAG'):
+                filter=lambda item: item[b'path'].endswith(CACHE_TAG_NAME) or matcher.match(item[b'path'])):
+            if item[b'path'].endswith(CACHE_TAG_NAME):
                 cachedir_masters[item[b'path']] = item
             if stat.S_ISREG(item[b'mode']):
                 dir, tag_file = os.path.split(item[b'path'])
                 if tag_file in self.exclude_if_present:
                     exclude(dir, item)
-                if self.exclude_caches and tag_file == 'CACHEDIR.TAG':
-                    tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
+                if self.exclude_caches and tag_file == CACHE_TAG_NAME:
                     if b'chunks' in item:
                         file = open_item(archive, item)
                     else:
                         file = open_item(archive, cachedir_masters[item[b'source']])
-                    if file.read(len(tag_contents)).startswith(tag_contents):
+                    if file.read(len(CACHE_TAG_CONTENTS)).startswith(CACHE_TAG_CONTENTS):
                         exclude(dir, item)
         matcher.add(tag_files, True)
         matcher.add(tagged_dirs, False)

borg/archiver.py

@@ -22,23 +22,21 @@
     get_cache_dir, prune_within, prune_split, \
     Manifest, remove_surrogates, update_excludes, format_archive, check_extension_modules, Statistics, \
     dir_is_tagged, ChunkerParams, CompressionSpec, is_slow_msgpack, yes, sysinfo, \
-    EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR, log_multi, PatternMatcher, ItemFormatter, DASHES
+    log_multi, PatternMatcher, ItemFormatter
 from .logger import create_logger, setup_logging
 logger = create_logger()
 from .compress import Compressor, COMPR_BUFFER
 from .upgrader import AtticRepositoryUpgrader, BorgRepositoryUpgrader
 from .repository import Repository
 from .cache import Cache
+from .constants import *  # NOQA
 from .key import key_creator, RepoKey, PassphraseKey
-from .archive import Archive, ArchiveChecker, ArchiveRecreater, CHUNKER_PARAMS
+from .archive import Archive, ArchiveChecker, ArchiveRecreater
 from .remote import RepositoryServer, RemoteRepository, cache_if_remote
 from .hashindex import ChunkIndexEntry

 has_lchflags = hasattr(os, 'lchflags')

-# default umask, overriden by --umask, defaults to read/write only for owner
-UMASK_DEFAULT = 0o077
-
 def argument(args, str_or_bool):
     """If bool is passed, return it. If str is passed, retrieve named attribute from args."""
@@ -1956,7 +1954,6 @@ def main():  # pragma: no cover
     elif exit_code == EXIT_ERROR:
         logger.error(exit_msg % ('error', exit_code))
     else:
-        # if you see 666 in output, it usually means exit_code was None
         logger.error(exit_msg % ('abnormal', exit_code or 666))
     sys.exit(exit_code)
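For reference, the return-code convention these names encode (now documented once in `constants.py`): 0 for success, 1 for warnings, 2 for errors, and 128 + N when the process is killed by signal N; the 666 above only appears when `exit_code` was `None`. A sketch of a wrapper script consuming the convention (the borg invocation is illustrative):

```python
import subprocess

EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR = 0, 1, 2

rc = subprocess.call(['borg', 'list', '/path/to/repo'])  # illustrative command
if rc == EXIT_SUCCESS:
    print('ok')
elif rc == EXIT_WARNING:
    print('reached the end of the operation, but there were issues')
elif rc == EXIT_ERROR:
    print('terminated abruptly')
elif rc >= 128:
    print('killed by signal %d' % (rc - 128))
```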

borg/constants.py (new file)

@@ -0,0 +1,37 @@
+# this set must be kept complete, otherwise the RobustUnpacker might malfunction:
+ITEM_KEYS = set([b'path', b'source', b'rdev', b'chunks', b'hardlink_master',
+                 b'mode', b'user', b'group', b'uid', b'gid', b'mtime', b'atime', b'ctime',
+                 b'xattrs', b'bsdflags', b'acl_nfs4', b'acl_access', b'acl_default', b'acl_extended', ])
+
+ARCHIVE_TEXT_KEYS = (b'name', b'comment', b'hostname', b'username', b'time', b'time_end')
+ITEM_TEXT_KEYS = (b'path', b'source', b'user', b'group')
+
+# default umask, overriden by --umask, defaults to read/write only for owner
+UMASK_DEFAULT = 0o077
+
+CACHE_TAG_NAME = 'CACHEDIR.TAG'
+CACHE_TAG_CONTENTS = b'Signature: 8a477f597d28d172789f06886806bc55'
+
+DEFAULT_MAX_SEGMENT_SIZE = 5 * 1024 * 1024
+DEFAULT_SEGMENTS_PER_DIR = 10000
+
+CHUNK_MIN_EXP = 19  # 2**19 == 512kiB
+CHUNK_MAX_EXP = 23  # 2**23 == 8MiB
+HASH_WINDOW_SIZE = 0xfff  # 4095B
+HASH_MASK_BITS = 21  # results in ~2MiB chunks statistically
+
+# defaults, use --chunker-params to override
+CHUNKER_PARAMS = (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE)
+
+# chunker params for the items metadata stream, finer granularity
+ITEMS_CHUNKER_PARAMS = (12, 16, 14, HASH_WINDOW_SIZE)
+
+# return codes returned by borg command
+# when borg is killed by signal N, rc = 128 + N
+EXIT_SUCCESS = 0  # everything done, no problems
+EXIT_WARNING = 1  # reached normal end of operation, but there were issues
+EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
+
+DASHES = '-' * 78
+
+PBKDF2_ITERATIONS = 100000
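The chunker tuple is worth unpacking: `(min_exp, max_exp, mask_bits, window_size)` means chunk sizes between 2**min_exp and 2**max_exp bytes, with a cut point (roughly speaking) whenever the masked rolling hash matches, which happens on average once every 2**mask_bits bytes. The arithmetic in the comments checks out:

```python
CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE = 19, 23, 21, 0xfff

assert 2 ** CHUNK_MIN_EXP == 512 * 1024        # minimum chunk size: 512 kiB
assert 2 ** CHUNK_MAX_EXP == 8 * 1024 * 1024   # maximum chunk size: 8 MiB
assert 2 ** HASH_MASK_BITS == 2 * 1024 * 1024  # expected chunk size: ~2 MiB
assert HASH_WINDOW_SIZE == 4095                # rolling hash window: 4095 bytes
```

The items metadata stream uses (12, 16, 14, ...) instead: 4 kiB to 64 kiB chunks averaging ~16 kiB, so a small metadata change rewrites little data.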

borg/helpers.py

@@ -28,6 +28,7 @@
 from . import __version__ as borg_version
 from . import hashindex
 from . import chunker
+from .constants import *  # NOQA
 from . import crypto
 from . import shellpattern
 import msgpack
@@ -35,14 +36,6 @@
 import socket

-# return codes returned by borg command
-# when borg is killed by signal N, rc = 128 + N
-EXIT_SUCCESS = 0  # everything done, no problems
-EXIT_WARNING = 1  # reached normal end of operation, but there were issues
-EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
-
-DASHES = '-' * 78
-
 class Error(Exception):
     """Error base class"""
@@ -248,13 +241,13 @@ def get_cache_dir():
     if not os.path.exists(cache_dir):
         os.makedirs(cache_dir)
         os.chmod(cache_dir, stat.S_IRWXU)
-        with open(os.path.join(cache_dir, 'CACHEDIR.TAG'), 'w') as fd:
+        with open(os.path.join(cache_dir, CACHE_TAG_NAME), 'wb') as fd:
+            fd.write(CACHE_TAG_CONTENTS)
             fd.write(textwrap.dedent("""
-                Signature: 8a477f597d28d172789f06886806bc55
                 # This file is a cache directory tag created by Borg.
                 # For information about cache directory tags, see:
                 #       http://www.brynosaurus.com/cachedir/
-                """).lstrip())
+                """).encode('ascii'))
     return cache_dir
@@ -495,7 +488,6 @@ def timestamp(s):
 def ChunkerParams(s):
     if s.strip().lower() == "default":
-        from .archive import CHUNKER_PARAMS
         return CHUNKER_PARAMS
     chunk_min, chunk_max, chunk_mask, window_size = s.split(',')
     if int(chunk_max) > 23:
@@ -534,13 +526,12 @@ def dir_is_cachedir(path):
     (http://www.brynosaurus.com/cachedir/spec.html).
     """
-    tag_contents = b'Signature: 8a477f597d28d172789f06886806bc55'
-    tag_path = os.path.join(path, 'CACHEDIR.TAG')
+    tag_path = os.path.join(path, CACHE_TAG_NAME)
     try:
         if os.path.exists(tag_path):
             with open(tag_path, 'rb') as tag_file:
-                tag_data = tag_file.read(len(tag_contents))
-                if tag_data == tag_contents:
+                tag_data = tag_file.read(len(CACHE_TAG_CONTENTS))
+                if tag_data == CACHE_TAG_CONTENTS:
                     return True
     except OSError:
         pass
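The detection follows the CACHEDIR.TAG specification linked in the docstring: a directory counts as a cache if it contains a file named `CACHEDIR.TAG` whose content starts with the fixed 43-byte signature. Condensed into a standalone sketch of the same logic:

```python
import os

CACHE_TAG_NAME = 'CACHEDIR.TAG'
CACHE_TAG_CONTENTS = b'Signature: 8a477f597d28d172789f06886806bc55'  # 43 bytes

def is_cachedir(path):
    # only the leading signature bytes matter; anything may follow them
    tag_path = os.path.join(path, CACHE_TAG_NAME)
    try:
        with open(tag_path, 'rb') as tag_file:
            return tag_file.read(len(CACHE_TAG_CONTENTS)) == CACHE_TAG_CONTENTS
    except OSError:
        return False
```

That "anything may follow" detail is why the tests below create tag files with `CACHE_TAG_CONTENTS + b' extra stuff'` and still expect them to be honored.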
@@ -555,7 +546,7 @@ def dir_is_tagged(path, exclude_caches, exclude_if_present):
     """
     tag_paths = []
     if exclude_caches and dir_is_cachedir(path):
-        tag_paths.append(os.path.join(path, 'CACHEDIR.TAG'))
+        tag_paths.append(os.path.join(path, CACHE_TAG_NAME))
     if exclude_if_present is not None:
         for tag in exclude_if_present:
             tag_path = os.path.join(path, tag)

borg/key.py

@@ -11,6 +11,7 @@
 from .logger import create_logger
 logger = create_logger()

+from .constants import *  # NOQA
 from .crypto import AES, bytes_to_long, long_to_bytes, bytes_to_int, num_aes_blocks, hmac_sha256
 from .compress import Compressor, COMPR_BUFFER
 import msgpack
@@ -338,7 +339,7 @@ def decrypt_key_file(self, data, passphrase):
     def encrypt_key_file(self, data, passphrase):
         salt = os.urandom(32)
-        iterations = 100000
+        iterations = PBKDF2_ITERATIONS
         key = passphrase.kdf(salt, iterations, 32)
         hash = hmac_sha256(key, data)
         cdata = AES(is_encrypt=True, key=key).encrypt(data)
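`passphrase.kdf(salt, iterations, 32)` is borg's thin wrapper around PBKDF2; assuming PBKDF2-HMAC-SHA256 (which is what borg's key files use), the equivalent stdlib call looks like this sketch:

```python
import hashlib
import os

PBKDF2_ITERATIONS = 100000

salt = os.urandom(32)
passphrase = b'correct horse battery staple'  # illustrative passphrase
# derive a 32-byte AES key from the passphrase and salt
key = hashlib.pbkdf2_hmac('sha256', passphrase, salt, PBKDF2_ITERATIONS, dklen=32)
assert len(key) == 32
```

Naming the iteration count `PBKDF2_ITERATIONS` in `constants.py` makes the work factor tunable in one place instead of living as a buried literal.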

borg/repository.py

@@ -12,6 +12,7 @@
 from zlib import crc32

 import msgpack
+from .constants import *  # NOQA
 from .helpers import Error, ErrorWithTraceback, IntegrityError, Location, ProgressIndicatorPercent
 from .hashindex import NSIndex
 from .locking import UpgradableLock, LockError, LockErrorT
@@ -35,9 +36,6 @@ class Repository:
     dir/index.X
     dir/hints.X
     """
-    DEFAULT_MAX_SEGMENT_SIZE = 5 * 1024 * 1024
-    DEFAULT_SEGMENTS_PER_DIR = 10000
-
     class DoesNotExist(Error):
         """Repository {} does not exist."""
@@ -98,8 +96,8 @@ def create(self, path):
         config = ConfigParser(interpolation=None)
         config.add_section('repository')
         config.set('repository', 'version', '1')
-        config.set('repository', 'segments_per_dir', str(self.DEFAULT_SEGMENTS_PER_DIR))
-        config.set('repository', 'max_segment_size', str(self.DEFAULT_MAX_SEGMENT_SIZE))
+        config.set('repository', 'segments_per_dir', str(DEFAULT_SEGMENTS_PER_DIR))
+        config.set('repository', 'max_segment_size', str(DEFAULT_MAX_SEGMENT_SIZE))
         config.set('repository', 'append_only', '0')
         config.set('repository', 'id', hexlify(os.urandom(32)).decode('ascii'))
         self.save_config(path, config)
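The repository config written here is plain INI, so it can be inspected or read back with the same stdlib `configparser`; a small sketch (the repository path is hypothetical):

```python
from configparser import ConfigParser

config = ConfigParser(interpolation=None)
config.read('/path/to/repo/config')  # hypothetical repository path
# a freshly created repo carries the defaults from constants.py
print(config.getint('repository', 'max_segment_size'))  # 5242880 by default
print(config.getint('repository', 'segments_per_dir'))  # 10000 by default
```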

borg/testsuite/archiver.py

@@ -18,11 +18,12 @@
 import pytest

 from .. import xattr
-from ..archive import Archive, ChunkBuffer, ArchiveRecreater, CHUNK_MAX_EXP
+from ..archive import Archive, ChunkBuffer, ArchiveRecreater
 from ..archiver import Archiver
 from ..cache import Cache
+from ..constants import *  # NOQA
 from ..crypto import bytes_to_long, num_aes_blocks
-from ..helpers import Manifest, EXIT_SUCCESS, EXIT_WARNING, EXIT_ERROR
+from ..helpers import Manifest
 from ..key import KeyfileKeyBase
 from ..remote import RemoteRepository, PathNotAllowed
 from ..repository import Repository
@@ -641,16 +642,18 @@ def test_extract_with_pattern(self):
     def _create_test_caches(self):
         self.cmd('init', self.repository_location)
         self.create_regular_file('file1', size=1024 * 80)
-        self.create_regular_file('cache1/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
-        self.create_regular_file('cache2/CACHEDIR.TAG', contents=b'invalid signature')
+        self.create_regular_file('cache1/%s' % CACHE_TAG_NAME,
+                                 contents=CACHE_TAG_CONTENTS + b' extra stuff')
+        self.create_regular_file('cache2/%s' % CACHE_TAG_NAME,
+                                 contents=b'invalid signature')
         os.mkdir('input/cache3')
-        os.link('input/cache1/CACHEDIR.TAG', 'input/cache3/CACHEDIR.TAG')
+        os.link('input/cache1/%s' % CACHE_TAG_NAME, 'input/cache3/%s' % CACHE_TAG_NAME)

     def _assert_test_caches(self):
         with changedir('output'):
             self.cmd('extract', self.repository_location + '::test')
             self.assert_equal(sorted(os.listdir('output/input')), ['cache2', 'file1'])
-            self.assert_equal(sorted(os.listdir('output/input/cache2')), ['CACHEDIR.TAG'])
+            self.assert_equal(sorted(os.listdir('output/input/cache2')), [CACHE_TAG_NAME])

     def test_exclude_caches(self):
         self._create_test_caches()
@@ -694,11 +697,13 @@ def _create_test_keep_tagged(self):
         self.create_regular_file('tagged1/file1', size=1024)
         self.create_regular_file('tagged2/.NOBACKUP2')
         self.create_regular_file('tagged2/file2', size=1024)
-        self.create_regular_file('tagged3/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('tagged3/%s' % CACHE_TAG_NAME,
+                                 contents=CACHE_TAG_CONTENTS + b' extra stuff')
         self.create_regular_file('tagged3/file3', size=1024)
         self.create_regular_file('taggedall/.NOBACKUP1')
         self.create_regular_file('taggedall/.NOBACKUP2')
-        self.create_regular_file('taggedall/CACHEDIR.TAG', contents=b'Signature: 8a477f597d28d172789f06886806bc55 extra stuff')
+        self.create_regular_file('taggedall/%s' % CACHE_TAG_NAME,
+                                 contents=CACHE_TAG_CONTENTS + b' extra stuff')
         self.create_regular_file('taggedall/file4', size=1024)

     def _assert_test_keep_tagged(self):
@@ -707,9 +712,9 @@ def _assert_test_keep_tagged(self):
             self.assert_equal(sorted(os.listdir('output/input')), ['file0', 'tagged1', 'tagged2', 'tagged3', 'taggedall'])
             self.assert_equal(os.listdir('output/input/tagged1'), ['.NOBACKUP1'])
             self.assert_equal(os.listdir('output/input/tagged2'), ['.NOBACKUP2'])
-            self.assert_equal(os.listdir('output/input/tagged3'), ['CACHEDIR.TAG'])
+            self.assert_equal(os.listdir('output/input/tagged3'), [CACHE_TAG_NAME])
             self.assert_equal(sorted(os.listdir('output/input/taggedall')),
-                              ['.NOBACKUP1', '.NOBACKUP2', 'CACHEDIR.TAG', ])
+                              ['.NOBACKUP1', '.NOBACKUP2', CACHE_TAG_NAME, ])

     def test_exclude_keep_tagged(self):
         self._create_test_keep_tagged()

borg/testsuite/chunker.py

@@ -1,7 +1,7 @@
 from io import BytesIO

 from ..chunker import Chunker, buzhash, buzhash_update
-from ..archive import CHUNK_MAX_EXP, CHUNKER_PARAMS
+from ..constants import *  # NOQA
 from . import BaseTestCase

borg/testsuite/upgrader.py

@@ -9,10 +9,10 @@
 except ImportError:
     attic = None

+from ..constants import *  # NOQA
 from ..upgrader import AtticRepositoryUpgrader, AtticKeyfileKey
 from ..helpers import get_keys_dir
 from ..key import KeyfileKey
-from ..archiver import UMASK_DEFAULT
 from ..repository import Repository