
one datetime formatter to rule them all

# Conflicts:
#	src/borg/helpers.py
Marian Beermann 2017-08-16 17:57:08 +02:00
parent 03dea8c78b
commit a836f451ab
5 changed files with 50 additions and 41 deletions

@@ -209,8 +209,9 @@ Standard output
 *stdout* is different and more command-dependent than logging. Commands like :ref:`borg_info`, :ref:`borg_create`
 and :ref:`borg_list` implement a ``--json`` option which turns their regular output into a single JSON object.
-Dates are formatted according to ISO-8601 with the strftime format string '%a, %Y-%m-%d %H:%M:%S',
-e.g. *Sat, 2016-02-25 23:50:06*.
+Dates are formatted according to ISO-8601 in local time. Neither an explicit time zone nor microseconds
+are specified *at this time* (subject to change). The equivalent strftime format string is '%Y-%m-%dT%H:%M:%S',
+e.g. 2017-08-07T12:27:20.

 The root object at least contains a *repository* key with an object containing:
@@ -267,7 +268,7 @@ Example *borg info* output::
     },
     "repository": {
         "id": "0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
-        "last_modified": "Mon, 2017-02-27 21:21:58",
+        "last_modified": "2017-08-07T12:27:20",
         "location": "/home/user/testrepo"
     },
     "security_dir": "/home/user/.config/borg/security/0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
@@ -328,7 +329,7 @@ Example of a simple archive listing (``borg list --last 1 --json``)::
         {
             "id": "80cd07219ad725b3c5f665c1dcf119435c4dee1647a560ecac30f8d40221a46a",
             "name": "host-system-backup-2017-02-27",
-            "start": "Mon, 2017-02-27 21:21:52"
+            "start": "2017-08-07T12:27:20"
         }
     ],
     "encryption": {
@@ -336,7 +337,7 @@ Example of a simple archive listing (``borg list --last 1 --json``)::
     },
     "repository": {
         "id": "0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
-        "last_modified": "Mon, 2017-02-27 21:21:58",
+        "last_modified": "2017-08-07T12:27:20",
         "location": "/home/user/repository"
     }
 }
@@ -354,14 +355,14 @@ The same archive with more information (``borg info --last 1 --json``)::
             ],
             "comment": "",
             "duration": 5.641542,
-            "end": "Mon, 2017-02-27 21:21:58",
+            "end": "2017-02-27T12:27:20",
             "hostname": "host",
             "id": "80cd07219ad725b3c5f665c1dcf119435c4dee1647a560ecac30f8d40221a46a",
             "limits": {
                 "max_archive_size": 0.0001330855110409714
             },
             "name": "host-system-backup-2017-02-27",
-            "start": "Mon, 2017-02-27 21:21:52",
+            "start": "2017-02-27T12:27:20",
             "stats": {
                 "compressed_size": 1880961894,
                 "deduplicated_size": 2791,
@@ -387,7 +388,7 @@ The same archive with more information (``borg info --last 1 --json``)::
     },
     "repository": {
         "id": "0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
-        "last_modified": "Mon, 2017-02-27 21:21:58",
+        "last_modified": "2017-08-07T12:27:20",
        "location": "/home/user/repository"
     }
 }
@@ -405,8 +406,8 @@ Refer to the *borg list* documentation for the available keys and their meaning.
 Example (excerpt) of ``borg list --json-lines``::

-    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "Sat, 2016-05-07 19:46:01", "size": 0}
-    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux/baz", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "Sat, 2016-05-07 19:46:01", "size": 0}
+    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "2017-02-27T12:27:20", "size": 0}
+    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux/baz", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "2017-02-27T12:27:20", "size": 0}

 .. _msgid:
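For consumers of the ``--json`` output, the documented format string can be fed straight to Python's ``datetime.strptime``. A minimal sketch (the helper name is only illustrative, not part of borg's API); note the parsed value is naive local time, since the output carries no UTC offset or microseconds::

    from datetime import datetime

    def parse_borg_timestamp(value):
        # '2017-08-07T12:27:20' -> naive datetime in local time
        # (no time zone offset, no microseconds, per the docs change above)
        return datetime.strptime(value, '%Y-%m-%dT%H:%M:%S')

    print(parse_borg_timestamp('2017-08-07T12:27:20'))  # 2017-08-07 12:27:20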

@@ -33,7 +33,7 @@ from .helpers import ChunkIteratorFileWrapper, open_item
 from .helpers import Error, IntegrityError, set_ec
 from .helpers import uid2user, user2uid, gid2group, group2gid
 from .helpers import parse_timestamp, to_localtime
-from .helpers import format_time, format_timedelta, format_file_size, file_status, FileSize
+from .helpers import OutputTimestamp, format_timedelta, format_file_size, file_status, FileSize
 from .helpers import safe_encode, safe_decode, make_path_safe, remove_surrogates
 from .helpers import StableDict
 from .helpers import bin_to_hex
@@ -380,8 +380,8 @@ class Archive:
         info = {
             'name': self.name,
             'id': self.fpr,
-            'start': format_time(to_localtime(start)),
-            'end': format_time(to_localtime(end)),
+            'start': OutputTimestamp(start),
+            'end': OutputTimestamp(end),
             'duration': (end - start).total_seconds(),
             'stats': stats.as_dict(),
             'limits': {
@@ -410,8 +410,8 @@ Number of files: {0.stats.nfiles}
 Utilization of max. archive size: {csize_max:.0%}
 '''.format(
             self,
-            start=format_time(to_localtime(self.start.replace(tzinfo=timezone.utc))),
-            end=format_time(to_localtime(self.end.replace(tzinfo=timezone.utc))),
+            start=OutputTimestamp(self.start.replace(tzinfo=timezone.utc)),
+            end=OutputTimestamp(self.end.replace(tzinfo=timezone.utc)),
             csize_max=self.cache.chunks[self.id].csize / MAX_DATA_SIZE)

     def __repr__(self):

@@ -18,7 +18,7 @@ logger = create_logger()
 from .errors import Error
 from .fs import get_keys_dir
-from .time import format_time, isoformat_time, to_localtime, safe_timestamp, safe_s
+from .time import OutputTimestamp, format_time, to_localtime, safe_timestamp, safe_s
 from .usergroup import uid2user
 from .. import __version__ as borg_version
 from .. import __version_tuple__ as borg_version_tuple
@@ -549,12 +549,11 @@ class ArchiveFormatter(BaseFormatter):
         if self.json:
             self.item_data = {}
             self.format_item = self.format_item_json
-            self.format_time = self.format_time_json
         else:
             self.item_data = static_keys

     def format_item_json(self, item):
-        return json.dumps(self.get_item_data(item)) + '\n'
+        return json.dumps(self.get_item_data(item), cls=BorgJsonEncoder) + '\n'

     def get_item_data(self, archive_info):
         self.name = archive_info.name
@@ -588,12 +587,7 @@ class ArchiveFormatter(BaseFormatter):
         return self.format_time(self.archive.ts_end)

     def format_time(self, ts):
-        t = to_localtime(ts)
-        return format_time(t)
-
-    def format_time_json(self, ts):
-        t = to_localtime(ts)
-        return isoformat_time(t)
+        return OutputTimestamp(ts)


 class ItemFormatter(BaseFormatter):
@@ -669,7 +663,6 @@ class ItemFormatter(BaseFormatter):
         if self.json_lines:
             self.item_data = {}
             self.format_item = self.format_item_json
-            self.format_time = self.format_time_json
         else:
             self.item_data = static_keys
             self.format = partial_format(format, static_keys)
@@ -681,19 +674,19 @@
             'dcsize': partial(self.sum_unique_chunks_metadata, lambda chunk: chunk.csize),
             'num_chunks': self.calculate_num_chunks,
             'unique_chunks': partial(self.sum_unique_chunks_metadata, lambda chunk: 1),
-            'isomtime': partial(self.format_time, 'mtime'),
-            'isoctime': partial(self.format_time, 'ctime'),
-            'isoatime': partial(self.format_time, 'atime'),
-            'mtime': partial(self.time, 'mtime'),
-            'ctime': partial(self.time, 'ctime'),
-            'atime': partial(self.time, 'atime'),
+            'isomtime': partial(self.format_iso_time, 'mtime'),
+            'isoctime': partial(self.format_iso_time, 'ctime'),
+            'isoatime': partial(self.format_iso_time, 'atime'),
+            'mtime': partial(self.format_time, 'mtime'),
+            'ctime': partial(self.format_time, 'ctime'),
+            'atime': partial(self.format_time, 'atime'),
         }
         for hash_function in hashlib.algorithms_guaranteed:
             self.add_key(hash_function, partial(self.hash_item, hash_function))
         self.used_call_keys = set(self.call_keys) & self.format_keys

     def format_item_json(self, item):
-        return json.dumps(self.get_item_data(item)) + '\n'
+        return json.dumps(self.get_item_data(item), cls=BorgJsonEncoder) + '\n'

     def add_key(self, key, callable_with_item):
         self.call_keys[key] = callable_with_item
@@ -768,15 +761,10 @@
         return hash.hexdigest()

     def format_time(self, key, item):
-        t = self.time(key, item)
-        return format_time(t)
-
-    def format_time_json(self, key, item):
-        t = self.time(key, item)
-        return isoformat_time(t)
-
-    def time(self, key, item):
-        return safe_timestamp(item.get(key) or item.mtime)
+        return OutputTimestamp(safe_timestamp(item.get(key) or item.mtime))
+
+    def format_iso_time(self, key, item):
+        return self.format_time(key, item).to_json()


 def file_status(mode):
@@ -887,6 +875,8 @@ class BorgJsonEncoder(json.JSONEncoder):
             return {
                 'stats': o.stats(),
             }
+        if callable(getattr(o, 'to_json', None)):
+            return o.to_json()
         return super().default(o)
@@ -899,7 +889,7 @@ def basic_json_data(manifest, *, cache=None, extra=None):
             'mode': key.ARG_NAME,
         },
     })
-    data['repository']['last_modified'] = isoformat_time(to_localtime(manifest.last_timestamp.replace(tzinfo=timezone.utc)))
+    data['repository']['last_modified'] = OutputTimestamp(manifest.last_timestamp.replace(tzinfo=timezone.utc))
     if key.NAME.startswith('key file'):
        data['encryption']['keyfile'] = key.find_key()
     if cache:
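The new branch in ``BorgJsonEncoder.default()`` is a duck-typing hook: any value that exposes a callable ``to_json()`` serializes itself. A minimal, self-contained sketch of that pattern (only the encoder logic mirrors the hunk above; the ``Stamp`` class is a hypothetical stand-in)::

    import json
    from datetime import datetime

    class ToJsonEncoder(json.JSONEncoder):
        def default(self, o):
            # same duck-typed hook as added to BorgJsonEncoder above
            if callable(getattr(o, 'to_json', None)):
                return o.to_json()
            return super().default(o)

    class Stamp:
        """Hypothetical stand-in for any object that knows its own JSON form."""
        def __init__(self, ts):
            self.ts = ts

        def to_json(self):
            return self.ts.strftime('%Y-%m-%dT%H:%M:%S')

    print(json.dumps({'start': Stamp(datetime(2017, 8, 7, 12, 27, 20))}, cls=ToJsonEncoder))
    # {"start": "2017-08-07T12:27:20"}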

@@ -113,3 +113,19 @@ def format_timedelta(td):
     if td.days:
         txt = '%d days %s' % (td.days, txt)
     return txt
+
+
+class OutputTimestamp:
+    def __init__(self, ts: datetime):
+        if ts.tzinfo == timezone.utc:
+            ts = to_localtime(ts)
+        self.ts = ts
+
+    def __format__(self, format_spec):
+        return format_time(self.ts)
+
+    def __str__(self):
+        return '{}'.format(self)
+
+    def to_json(self):
+        return isoformat_time(self.ts)
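Taken together, the wrapper lets one object feed both output paths: ``str.format`` goes through ``__format__`` (human-readable, local time), while JSON serialization goes through ``to_json()`` via the encoder hook. A usage sketch, assuming borg with this change is importable and using a ``default=`` callback as a stand-in for ``BorgJsonEncoder``; the exact strings depend on ``format_time``/``isoformat_time``::

    import json
    from datetime import datetime, timezone

    from borg.helpers import OutputTimestamp  # mirrors the import in archive.py above

    ts = OutputTimestamp(datetime(2017, 8, 7, 10, 27, 20, tzinfo=timezone.utc))

    # text templates: __format__ -> format_time(), human-readable local time
    print('start: {}'.format(ts))

    # JSON: to_json() -> isoformat_time(), ISO-8601 in local time
    print(json.dumps({'start': ts}, default=lambda o: o.to_json()))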

@@ -1326,6 +1326,8 @@ class ArchiverTestCase(ArchiverTestCaseBase):
         assert isinstance(archive['duration'], float)
         assert len(archive['id']) == 64
         assert 'stats' in archive
+        assert datetime.strptime(archive['start'], ISO_FORMAT)
+        assert datetime.strptime(archive['end'], ISO_FORMAT)

     def test_comment(self):
         self.create_regular_file('file1', size=1024 * 80)