1
0
Fork 0
mirror of https://github.com/borgbackup/borg.git synced 2024-12-26 17:57:59 +00:00

use a module-specific logger instead of the global one

that way we have one logger per module, and we can pick and choose
which modules we want verbose, for example
This commit is contained in:
Antoine Beaupré 2015-10-02 10:58:08 -04:00
parent 1b540d91a0
commit c9b11316ab
7 changed files with 83 additions and 53 deletions

View file

@ -3,6 +3,7 @@
from itertools import groupby
import errno
import logging
logger = logging.getLogger(__name__)
from .key import key_factory
from .remote import cache_if_remote
@ -630,7 +631,7 @@ def check(self, repository, repair=False, archive=None, last=None):
self.orphan_chunks_check()
self.finish()
if not self.error_found:
logging.info('Archive consistency check complete, no problems found.')
logger.info('Archive consistency check complete, no problems found.')
return self.repair or not self.error_found
def init_chunks(self):
@ -652,7 +653,7 @@ def init_chunks(self):
def report_progress(self, msg, error=False):
if error:
self.error_found = True
logging.log(logging.ERROR if error else logging.WARNING, msg)
logger.log(logging.ERROR if error else logging.WARNING, msg)
def identify_key(self, repository):
cdata = repository.get(next(self.chunks.iteritems())[0])
@ -779,7 +780,7 @@ def missing_chunk_detector(chunk_id):
num_archives = 1
end = 1
for i, (name, info) in enumerate(archive_items[:end]):
logging.info('Analyzing archive {} ({}/{})'.format(name, num_archives - i, num_archives))
logger.info('Analyzing archive {} ({}/{})'.format(name, num_archives - i, num_archives))
archive_id = info[b'id']
if archive_id not in self.chunks:
self.report_progress('Archive metadata block is missing', error=True)

View file

@ -8,6 +8,8 @@
import inspect
import io
import logging
logger = logging.getLogger(__name__)
import os
import signal
import stat
@ -47,11 +49,11 @@ def open_repository(self, location, create=False, exclusive=False):
def print_error(self, msg, *args):
msg = args and msg % args or msg
self.exit_code = 1
logging.error('borg: ' + msg)
logger.error('borg: ' + msg)
def print_verbose(self, msg, *args, **kw):
msg = args and msg % args or msg
logging.info(msg)
logger.info(msg)
def do_serve(self, args):
"""Start in server mode. This command is usually not used manually.
@ -60,7 +62,7 @@ def do_serve(self, args):
def do_init(self, args):
"""Initialize an empty repository"""
logging.info('Initializing repository at "%s"' % args.repository.orig)
logger.info('Initializing repository at "%s"' % args.repository.orig)
repository = self.open_repository(args.repository, create=True, exclusive=True)
key = key_creator(repository, args)
manifest = Manifest(key, repository)
@ -82,9 +84,9 @@ def do_check(self, args):
if input('Do you want to continue? ') == 'Yes I am sure':
break
if not args.archives_only:
logging.warning('Starting repository check...')
logger.warning('Starting repository check...')
if repository.check(repair=args.repair):
logging.info('Repository check complete, no problems found.')
logger.info('Repository check complete, no problems found.')
else:
return 1
if not args.repo_only and not ArchiveChecker().check(
@ -160,15 +162,15 @@ def do_create(self, args):
if args.stats:
t = datetime.now()
diff = t - t0
logging.warning('-' * 78)
logging.warning('Archive name: %s' % args.archive.archive)
logging.warning('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
logging.warning('Start time: %s' % t0.strftime('%c'))
logging.warning('End time: %s' % t.strftime('%c'))
logging.warning('Duration: %s' % format_timedelta(diff))
logging.warning('Number of files: %d' % archive.stats.nfiles)
logging.warning(archive.stats.print_('This archive:', cache))
logging.warning('-' * 78)
logger.warning('-' * 78)
logger.warning('Archive name: %s' % args.archive.archive)
logger.warning('Archive fingerprint: %s' % hexlify(archive.id).decode('ascii'))
logger.warning('Start time: %s' % t0.strftime('%c'))
logger.warning('End time: %s' % t.strftime('%c'))
logger.warning('Duration: %s' % format_timedelta(diff))
logger.warning('Number of files: %d' % archive.stats.nfiles)
logger.warning(archive.stats.print_('This archive:', cache))
logger.warning('-' * 78)
return self.exit_code
def _process(self, archive, cache, excludes, exclude_caches, skip_inodes, path, restrict_dev,
@ -247,7 +249,7 @@ def do_extract(self, args):
"""Extract archive contents"""
# be restrictive when restoring files, restore permissions later
if sys.getfilesystemencoding() == 'ascii':
logging.warning('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
logger.warning('Warning: File system encoding is "ascii", extracting non-ascii filenames will not be supported.')
repository = self.open_repository(args.archive)
manifest, key = Manifest.load(repository)
archive = Archive(repository, key, manifest, args.archive.archive,
@ -313,11 +315,11 @@ def do_delete(self, args):
repository.commit()
cache.commit()
if args.stats:
logging.warning(stats.print_('Deleted data:', cache))
logger.warning(stats.print_('Deleted data:', cache))
else:
logging.warning("You requested to completely DELETE the repository *including* all archives it contains:")
logger.warning("You requested to completely DELETE the repository *including* all archives it contains:")
for archive_info in manifest.list_archive_infos(sort_by='ts'):
logging.warning(format_archive(archive_info))
logger.warning(format_archive(archive_info))
if not os.environ.get('BORG_CHECK_I_KNOW_WHAT_I_AM_DOING'):
print("""Type "YES" if you understand this and want to continue.""")
if input('Do you want to continue? ') != 'YES':
@ -325,7 +327,7 @@ def do_delete(self, args):
return self.exit_code
repository.destroy()
cache.destroy()
logging.info("Repository and corresponding cache were deleted.")
logger.info("Repository and corresponding cache were deleted.")
return self.exit_code
def do_mount(self, args):
@ -405,14 +407,14 @@ def do_info(self, args):
cache = Cache(repository, key, manifest, do_files=args.cache_files)
archive = Archive(repository, key, manifest, args.archive.archive, cache=cache)
stats = archive.calc_stats(cache)
logging.warning('Name:', archive.name)
logging.warning('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
logging.warning('Hostname:', archive.metadata[b'hostname'])
logging.warning('Username:', archive.metadata[b'username'])
logging.warning('Time: %s' % to_localtime(archive.ts).strftime('%c'))
logging.warning('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
logging.warning('Number of files: %d' % stats.nfiles)
logging.warning(stats.print_('This archive:', cache))
logger.warning('Name:', archive.name)
logger.warning('Fingerprint: %s' % hexlify(archive.id).decode('ascii'))
logger.warning('Hostname:', archive.metadata[b'hostname'])
logger.warning('Username:', archive.metadata[b'username'])
logger.warning('Time: %s' % to_localtime(archive.ts).strftime('%c'))
logger.warning('Command line:', remove_surrogates(' '.join(archive.metadata[b'cmdline'])))
logger.warning('Number of files: %d' % stats.nfiles)
logger.warning(stats.print_('This archive:', cache))
return self.exit_code
def do_prune(self, args):
@ -457,7 +459,7 @@ def do_prune(self, args):
repository.commit()
cache.commit()
if args.stats:
logging.warning(stats.print_('Deleted data:', cache))
logger.warning(stats.print_('Deleted data:', cache))
return self.exit_code
helptext = {}
@ -511,6 +513,7 @@ def setup_logging(self, args):
logging.raiseExceptions = False
l = logging.getLogger('')
sh = logging.StreamHandler(sys.stderr)
sh.setFormatter(logging.Formatter('%(name)s: %(message)s'))
l.addHandler(sh)
levels = { None: logging.WARNING,
0: logging.WARNING,
@ -936,7 +939,7 @@ def sig_info_handler(signum, stack): # pragma: no cover
total = loc['st'].st_size
except Exception:
pos, total = 0, 0
logging.warning("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
logger.warning("{0} {1}/{2}".format(path, format_file_size(pos), format_file_size(total)))
break
if func in ('extract_item', ): # extract op
path = loc['item'][b'path']
@ -944,7 +947,7 @@ def sig_info_handler(signum, stack): # pragma: no cover
pos = loc['fd'].tell()
except Exception:
pos = 0
logging.warning("{0} {1}/???".format(path, format_file_size(pos)))
logger.warning("{0} {1}/???".format(path, format_file_size(pos)))
break

View file

@ -2,6 +2,8 @@
from .remote import cache_if_remote
import errno
import logging
logger = logging.getLogger(__name__)
import msgpack
import os
import stat
@ -72,9 +74,9 @@ def __del__(self):
self.close()
def _confirm(self, message, env_var_override=None):
logging.warning(message)
logger.warning(message)
if env_var_override and os.environ.get(env_var_override):
logging.warning("Yes (From {})".format(env_var_override))
logger.warning("Yes (From {})".format(env_var_override))
return True
if not sys.stdin.isatty():
return False
@ -254,7 +256,7 @@ def fetch_and_build_idx(archive_id, repository, key):
unpacker.feed(data)
for item in unpacker:
if not isinstance(item, dict):
logging.error('Error: Did not get expected metadata dict - archive corrupted!')
logger.error('Error: Did not get expected metadata dict - archive corrupted!')
continue
if b'chunks' in item:
for chunk_id, size, csize in item[b'chunks']:
@ -275,10 +277,10 @@ def lookup_name(archive_id):
return name
def create_master_idx(chunk_idx):
logging.info('Synchronizing chunks cache...')
logger.info('Synchronizing chunks cache...')
cached_ids = cached_archives()
archive_ids = repo_archives()
logging.info('Archives: %d, w/ cached Idx: %d, w/ outdated Idx: %d, w/o cached Idx: %d.' % (
logger.info('Archives: %d, w/ cached Idx: %d, w/ outdated Idx: %d, w/o cached Idx: %d.' % (
len(archive_ids), len(cached_ids),
len(cached_ids - archive_ids), len(archive_ids - cached_ids), ))
# deallocates old hashindex, creates empty hashindex:
@ -290,12 +292,12 @@ def create_master_idx(chunk_idx):
archive_name = lookup_name(archive_id)
if archive_id in cached_ids:
archive_chunk_idx_path = mkpath(archive_id)
logging.info("Reading cached archive chunk index for %s ..." % archive_name)
logger.info("Reading cached archive chunk index for %s ..." % archive_name)
archive_chunk_idx = ChunkIndex.read(archive_chunk_idx_path)
else:
logging.info('Fetching and building archive index for %s ...' % archive_name)
logger.info('Fetching and building archive index for %s ...' % archive_name)
archive_chunk_idx = fetch_and_build_idx(archive_id, repository, self.key)
logging.info("Merging into master chunks index ...")
logger.info("Merging into master chunks index ...")
if chunk_idx is None:
# we just use the first archive's idx as starting point,
# to avoid growing the hash table from 0 size and also
@ -303,7 +305,7 @@ def create_master_idx(chunk_idx):
chunk_idx = archive_chunk_idx
else:
chunk_idx.merge(archive_chunk_idx)
logging.info('Done.')
logger.info('Done.')
return chunk_idx
def legacy_cleanup():

View file

@ -6,6 +6,8 @@
from functools import wraps
import grp
import logging
logger = logging.getLogger(__name__)
import os
import pwd
import re

View file

@ -2,6 +2,8 @@
import configparser
import getpass
import logging
logger = logging.getLogger(__name__)
import os
import msgpack
import textwrap
@ -89,7 +91,7 @@ class PlaintextKey(KeyBase):
@classmethod
def create(cls, repository, args):
logging.info('Encryption NOT enabled.\nUse the "--encryption=repokey|keyfile|passphrase" to enable encryption.')
logger.info('Encryption NOT enabled.\nUse the "--encryption=repokey|keyfile|passphrase" to enable encryption.')
return cls(repository)
@classmethod
@ -191,12 +193,12 @@ def new(cls, allow_empty=False):
if allow_empty or passphrase:
passphrase2 = cls.getpass('Enter same passphrase again: ')
if passphrase == passphrase2:
logging.info('Remember your passphrase. Your data will be inaccessible without it.')
logger.info('Remember your passphrase. Your data will be inaccessible without it.')
return passphrase
else:
logging.warning('Passphrases do not match')
logger.warning('Passphrases do not match')
else:
logging.warning('Passphrase must not be blank')
logger.warning('Passphrase must not be blank')
def __repr__(self):
return '<Passphrase "***hidden***">'
@ -216,8 +218,8 @@ class PassphraseKey(AESKeyBase):
@classmethod
def create(cls, repository, args):
key = cls(repository)
logging.warning('WARNING: "passphrase" mode is deprecated and will be removed in 1.0.')
logging.warning('If you want something similar (but with less issues), use "repokey" mode.')
logger.warning('WARNING: "passphrase" mode is deprecated and will be removed in 1.0.')
logger.warning('If you want something similar (but with less issues), use "repokey" mode.')
passphrase = Passphrase.new(allow_empty=False)
key.init(repository, passphrase)
return key
@ -325,7 +327,7 @@ def _save(self, passphrase):
def change_passphrase(self):
passphrase = Passphrase.new(allow_empty=True)
self.save(self.target, passphrase)
logging.info('Key updated')
logger.info('Key updated')
@classmethod
def create(cls, repository, args):
@ -336,8 +338,8 @@ def create(cls, repository, args):
key.init_ciphers()
target = key.get_new_target(args)
key.save(target, passphrase)
logging.info('Key in "%s" created.' % target)
logging.info('Keep this key safe. Your data will be inaccessible without it.')
logger.info('Key in "%s" created.' % target)
logger.info('Keep this key safe. Your data will be inaccessible without it.')
return key
def save(self, target, passphrase):

View file

@ -3,6 +3,8 @@
from itertools import islice
import errno
import logging
logger = logging.getLogger(__name__)
import os
import shutil
import struct
@ -279,7 +281,7 @@ def check(self, repair=False):
def report_error(msg):
nonlocal error_found
error_found = True
logging.error(msg)
logger.error(msg)
assert not self._active_txn
try:
@ -567,7 +569,7 @@ def recover_segment(self, segment, filename):
with open(filename, 'rb') as fd:
data = memoryview(fd.read())
os.rename(filename, filename + '.beforerecover')
logging.info('attempting to recover ' + filename)
logger.info('attempting to recover ' + filename)
with open(filename, 'wb') as fd:
fd.write(MAGIC)
while len(data) >= self.header_fmt.size:

18
borg/testsuite/logger.py Normal file
View file

@ -0,0 +1,18 @@
import logging
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import pytest
def test_mod_logger():
    """Check that a module-level logger tags records with the module's dotted name.

    Attaches a temporary StreamHandler (writing to an in-memory StringIO) with a
    ``%(name)s: %(message)s`` formatter and asserts the emitted line carries this
    module's import path as the logger name.
    """
    logger = logging.getLogger(__name__)
    io = StringIO()
    handler = logging.StreamHandler(io)
    handler.setFormatter(logging.Formatter('%(name)s: %(message)s'))
    logger.addHandler(handler)
    try:
        logger.setLevel(logging.DEBUG)
        logger.info('hello world')
        assert io.getvalue() == "borg.testsuite.logger: hello world\n"
    finally:
        # logging.getLogger() returns a shared singleton per name; without this
        # cleanup the handler (and the DEBUG level) would leak into every later
        # test that logs through this module's logger.
        logger.removeHandler(handler)