
Merge pull request #2215 from enkore/f/logjson

Add --log-json option for structured logging output
Commit 5f802f3942 by enkore, 2017-02-26 16:49:48 +01:00 (committed by GitHub)
4 changed files with 89 additions and 22 deletions
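For orientation before the diff: with --log-json, borg writes one JSON object per line to stderr instead of formatted text. A minimal consumer sketch follows; the invocation and paths are placeholders, and the three message types (log_message, archive_progress, file_status) are the ones introduced by the hunks below.

    import json
    import subprocess

    # Hedged sketch: consume borg's --log-json stderr stream line by line.
    # Assumes a borg binary on PATH and an existing repository; the 'type'
    # values are taken from this commit.
    proc = subprocess.Popen(
        ['borg', 'create', '--log-json', '--list', '--progress',
         '/path/to/repo::archive-1', '/some/dir'],
        stderr=subprocess.PIPE, universal_newlines=True)
    for line in proc.stderr:
        try:
            msg = json.loads(line)
        except ValueError:
            continue  # tolerate any non-JSON stragglers
        if msg.get('type') == 'log_message':
            print(msg['levelname'], msg['name'], msg['message'])
        elif msg.get('type') == 'archive_progress':
            print('progress:', msg.get('path', ''))
        elif msg.get('type') == 'file_status':
            print(msg['status'], msg['path'])
    proc.wait()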

diff --git a/src/borg/archive.py b/src/borg/archive.py
--- a/src/borg/archive.py
+++ b/src/borg/archive.py

@@ -1,4 +1,5 @@
 import errno
+import json
 import os
 import socket
 import stat
@@ -49,7 +50,8 @@
 class Statistics:
 
-    def __init__(self):
+    def __init__(self, output_json=False):
+        self.output_json = output_json
         self.osize = self.csize = self.usize = self.nfiles = 0
         self.last_progress = 0  # timestamp when last progress was shown
@@ -92,19 +94,29 @@ def show_progress(self, item=None, final=False, stream=None, dt=None):
         now = time.monotonic()
         if dt is None or now - self.last_progress > dt:
             self.last_progress = now
-            columns, lines = get_terminal_size()
-            if not final:
-                msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
-                path = remove_surrogates(item.path) if item else ''
-                space = columns - swidth(msg)
-                if space < 12:
-                    msg = ''
-                    space = columns - swidth(msg)
-                if space >= 8:
-                    msg += ellipsis_truncate(path, space)
+            if self.output_json:
+                data = self.as_dict()
+                data.update({
+                    'type': 'archive_progress',
+                    'path': remove_surrogates(item.path if item else ''),
+                })
+                msg = json.dumps(data)
+                end = '\n'
             else:
-                msg = ' ' * columns
-            print(msg, file=stream or sys.stderr, end="\r", flush=True)
+                columns, lines = get_terminal_size()
+                if not final:
+                    msg = '{0.osize_fmt} O {0.csize_fmt} C {0.usize_fmt} D {0.nfiles} N '.format(self)
+                    path = remove_surrogates(item.path) if item else ''
+                    space = columns - swidth(msg)
+                    if space < 12:
+                        msg = ''
+                        space = columns - swidth(msg)
+                    if space >= 8:
+                        msg += ellipsis_truncate(path, space)
+                else:
+                    msg = ' ' * columns
+                end = '\r'
+            print(msg, end=end, file=stream or sys.stderr, flush=True)
 
 
 def is_special(mode):
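Note the terminator change: in JSON mode each progress update ends with '\n' rather than '\r', so every update is a complete, parseable line. The payload merges Statistics.as_dict() with a type tag and the current path. as_dict() is not part of this diff; assuming it exposes the size counters and file count, a line looks roughly like this sketch:

    import json

    # Hypothetical archive_progress line: 'type' and 'path' come from the hunk
    # above; the remaining keys depend on Statistics.as_dict(), which this diff
    # does not show -- treat them as assumptions.
    line = ('{"original_size": 1331072, "compressed_size": 524288, '
            '"deduplicated_size": 65536, "nfiles": 42, '
            '"type": "archive_progress", "path": "home/user/file.txt"}')
    msg = json.loads(line)
    assert msg['type'] == 'archive_progress'
    print('{nfiles} files, {original_size} bytes so far'.format(**msg))
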
@@ -264,14 +276,14 @@ class IncompatibleFilesystemEncodingError(Error):
     def __init__(self, repository, key, manifest, name, cache=None, create=False,
                  checkpoint_interval=300, numeric_owner=False, noatime=False, noctime=False, progress=False,
                  chunker_params=CHUNKER_PARAMS, start=None, start_monotonic=None, end=None, compression=None, compression_files=None,
-                 consider_part_files=False):
+                 consider_part_files=False, log_json=False):
         self.cwd = os.getcwd()
         self.key = key
         self.repository = repository
         self.cache = cache
         self.manifest = manifest
         self.hard_links = {}
-        self.stats = Statistics()
+        self.stats = Statistics(output_json=log_json)
         self.show_progress = progress
         self.name = name
         self.checkpoint_interval = checkpoint_interval

diff --git a/src/borg/archiver.py b/src/borg/archiver.py
--- a/src/borg/archiver.py
+++ b/src/borg/archiver.py

@@ -121,7 +121,7 @@ def with_archive(method):
     def wrapper(self, args, repository, key, manifest, **kwargs):
         archive = Archive(repository, key, manifest, args.location.archive,
                           numeric_owner=getattr(args, 'numeric_owner', False), cache=kwargs.get('cache'),
-                          consider_part_files=args.consider_part_files)
+                          consider_part_files=args.consider_part_files, log_json=args.log_json)
         return method(self, args, repository=repository, manifest=manifest, key=key, archive=archive, **kwargs)
     return wrapper
@@ -145,7 +145,14 @@ def print_warning(self, msg, *args):
     def print_file_status(self, status, path):
         if self.output_list and (self.output_filter is None or status in self.output_filter):
-            logging.getLogger('borg.output.list').info("%1s %s", status, remove_surrogates(path))
+            if self.log_json:
+                print(json.dumps({
+                    'type': 'file_status',
+                    'status': status,
+                    'path': remove_surrogates(path),
+                }), file=sys.stderr)
+            else:
+                logging.getLogger('borg.output.list').info("%1s %s", status, remove_surrogates(path))
 
     @staticmethod
     def compare_chunk_contents(chunks1, chunks2):
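Since each file_status record is a self-contained JSON line on stderr, post-processing a captured run is straightforward. A small offline sketch (the status letters follow borg's --list output, e.g. A/M/U/E; treat the exact set as an assumption here):

    import json
    from collections import Counter

    # Tally file statuses from a saved --log-json stderr capture.
    counts = Counter()
    with open('borg-stderr.log') as f:
        for line in f:
            try:
                msg = json.loads(line)
            except ValueError:
                continue
            if msg.get('type') == 'file_status':
                counts[msg['status']] += 1
    print(dict(counts))
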
@@ -395,7 +402,8 @@ def create_inner(archive, cache):
                               numeric_owner=args.numeric_owner, noatime=args.noatime, noctime=args.noctime,
                               progress=args.progress,
                               chunker_params=args.chunker_params, start=t0, start_monotonic=t0_monotonic,
-                              compression=args.compression, compression_files=args.compression_files)
+                              compression=args.compression, compression_files=args.compression_files,
+                              log_json=args.log_json)
             create_inner(archive, cache)
         else:
             create_inner(None, None)
@@ -1776,6 +1784,8 @@ def process_epilog(epilog):
                                   action='append', metavar='TOPIC', default=[],
                                   help='enable TOPIC debugging (can be specified multiple times). '
                                        'The logger path is borg.debug.<TOPIC> if TOPIC is not fully qualified.')
+        common_group.add_argument('--log-json', dest='log_json', action='store_true',
+                                  help='Output one JSON object per log line instead of formatted text.')
         common_group.add_argument('--lock-wait', dest='lock_wait', type=int, metavar='N', default=1,
                                   help='wait for the lock, but max. N seconds (default: %(default)d).')
         common_group.add_argument('--show-version', dest='show_version', action='store_true', default=False,
@@ -3176,7 +3186,9 @@ def run(self, args):
         self.lock_wait = args.lock_wait
         # This works around http://bugs.python.org/issue9351
         func = getattr(args, 'func', None) or getattr(args, 'fallback_func')
-        setup_logging(level=args.log_level, is_serve=func == self.do_serve)  # do not use loggers before this!
+        # do not use loggers before this!
+        setup_logging(level=args.log_level, is_serve=func == self.do_serve, json=args.log_json)
+        self.log_json = args.log_json
         self._setup_implied_logging(vars(args))
         self._setup_topic_debugging(args)
         if args.show_version:

diff --git a/src/borg/helpers.py b/src/borg/helpers.py
--- a/src/borg/helpers.py
+++ b/src/borg/helpers.py

@@ -1388,7 +1388,17 @@ def __init__(self):
         if not self.logger.handlers:
             self.handler = logging.StreamHandler(stream=sys.stderr)
             self.handler.setLevel(logging.INFO)
-            self.handler.terminator = '\r'
+            logger = logging.getLogger('borg')
+            # setup_logging creates some special attributes on the borg logger,
+            # but we also need to work when it has not run.
+            try:
+                formatter = logger.formatter
+                terminator = '\n' if logger.json else '\r'
+            except AttributeError:
+                terminator = '\r'
+            else:
+                self.handler.setFormatter(formatter)
+            self.handler.terminator = terminator
 
             self.logger.addHandler(self.handler)
         if self.logger.level == logging.NOTSET:
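The progress handler discovers the formatter and JSON flag that setup_logging stashed as ad-hoc attributes on the 'borg' logger; the try/except lets it fall back to the old '\r' behaviour when setup_logging never ran (e.g. in tests). An isolated sketch of that handshake, where the logger name 'demo' and both function names are ours, not borg's:

    import logging
    import sys

    def setup(json_mode):
        # Mirrors setup_logging: stash formatter and json flag on a logger.
        logger = logging.getLogger('demo')
        logger.formatter = logging.Formatter('%(message)s')
        logger.json = json_mode

    def make_handler():
        # Mirrors the hunk above: probe for the attributes, degrade gracefully.
        handler = logging.StreamHandler(stream=sys.stderr)
        logger = logging.getLogger('demo')
        try:
            formatter = logger.formatter
            terminator = '\n' if logger.json else '\r'
        except AttributeError:
            terminator = '\r'  # setup() never ran: keep legacy behaviour
        else:
            handler.setFormatter(formatter)
        handler.terminator = terminator
        return handler

    setup(json_mode=True)
    print(repr(make_handler().terminator))  # prints '\n'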

diff --git a/src/borg/logger.py b/src/borg/logger.py
--- a/src/borg/logger.py
+++ b/src/borg/logger.py

@@ -31,6 +31,7 @@
 """
 import inspect
+import json
 import logging
 import logging.config
 import logging.handlers  # needed for handlers defined there being configurable in logging.conf file
@@ -52,7 +53,7 @@ def _log_warning(message, category, filename, lineno, file=None, line=None):
     logger.warning(msg)
 
 
-def setup_logging(stream=None, conf_fname=None, env_var='BORG_LOGGING_CONF', level='info', is_serve=False):
+def setup_logging(stream=None, conf_fname=None, env_var='BORG_LOGGING_CONF', level='info', is_serve=False, json=False):
     """setup logging module according to the arguments provided
 
     if conf_fname is given (or the config file name can be determined via
@@ -91,7 +92,11 @@ def setup_logging(stream=None, conf_fname=None, env_var='BORG_LOGGING_CONF', level='info', is_serve=False):
         fmt = '$LOG %(levelname)s %(name)s Remote: %(message)s'
     else:
         fmt = '%(message)s'
-    handler.setFormatter(logging.Formatter(fmt))
+    formatter = JsonFormatter(fmt) if json else logging.Formatter(fmt)
+    handler.setFormatter(formatter)
+    borg_logger = logging.getLogger('borg')
+    borg_logger.formatter = formatter
+    borg_logger.json = json
     logger.addHandler(handler)
     logger.setLevel(level.upper())
     configured = True
@@ -181,3 +186,31 @@ def critical(self, *args, **kw):
             return self.__logger.critical(*args, **kw)
 
     return LazyLogger(name)
+
+
+class JsonFormatter(logging.Formatter):
+    RECORD_ATTRIBUTES = (
+        'created',
+        'levelname',
+        'name',
+        'message',
+    )
+
+    # Other attributes that do exist but are not very useful:
+    # processName, process, relativeCreated, stack_info, thread, threadName
+    # msg == message
+    # *args* are the unformatted arguments passed to the logger call; not useful
+    # now, but they become useful once they are sanitized properly (must be JSON
+    # serializable) and fixed message IDs are assigned.
+    # exc_info, exc_text are generally uninteresting because the message will
+    # contain that information anyway.
+
+    def format(self, record):
+        super().format(record)
+        data = {
+            'type': 'log_message',
+        }
+        for attr in self.RECORD_ATTRIBUTES:
+            value = getattr(record, attr, None)
+            if value:
+                data[attr] = value
+        return json.dumps(data)
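
To see the formatter's output shape end to end, a minimal sketch assuming a borg checkout containing this commit is importable; the logger name and message are examples, and the key order of the emitted object may vary:

    import logging

    from borg.logger import setup_logging

    setup_logging(level='info', json=True)
    logging.getLogger('borg.output.list').info('%1s %s', 'A', '/etc/hostname')
    # stderr (one line): {"type": "log_message", "created": 1488123456.789,
    #                     "levelname": "INFO", "name": "borg.output.list",
    #                     "message": "A /etc/hostname"}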