mirror of https://github.com/borgbackup/borg.git
one datetime formatter to rule them all
(cherry picked from commit a836f451ab)
parent fa65c9b143
commit 008571228f
4 changed files with 49 additions and 40 deletions
@@ -209,8 +209,9 @@ Standard output
 *stdout* is different and more command-dependent than logging. Commands like :ref:`borg_info`, :ref:`borg_create`
 and :ref:`borg_list` implement a ``--json`` option which turns their regular output into a single JSON object.

-Dates are formatted according to ISO-8601 with the strftime format string '%a, %Y-%m-%d %H:%M:%S',
-e.g. *Sat, 2016-02-25 23:50:06*.
+Dates are formatted according to ISO-8601 in local time. Neither an explicit time zone nor microseconds
+are specified *at this time* (subject to change). The equivalent strftime format string is '%Y-%m-%dT%H:%M:%S',
+e.g. 2017-08-07T12:27:20.

 The root object at least contains a *repository* key with an object containing:

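To illustrate the documented format change, here is a minimal sketch (not part of this commit; the json_timestamp helper is hypothetical) that renders a datetime the way the JSON dates are described above, i.e. local time, second precision, no explicit time zone::

    from datetime import datetime

    # hypothetical helper, not borg code: the format string is the one
    # documented above ('%Y-%m-%dT%H:%M:%S')
    def json_timestamp(dt: datetime) -> str:
        return dt.strftime('%Y-%m-%dT%H:%M:%S')

    print(json_timestamp(datetime(2017, 8, 7, 12, 27, 20)))  # -> 2017-08-07T12:27:20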
@@ -267,7 +268,7 @@ Example *borg info* output::
     },
     "repository": {
         "id": "0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
-        "last_modified": "Mon, 2017-02-27 21:21:58",
+        "last_modified": "2017-08-07T12:27:20",
         "location": "/home/user/testrepo"
     },
     "security_dir": "/home/user/.config/borg/security/0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
@@ -328,7 +329,7 @@ Example of a simple archive listing (``borg list --last 1 --json``)::
         {
             "id": "80cd07219ad725b3c5f665c1dcf119435c4dee1647a560ecac30f8d40221a46a",
             "name": "host-system-backup-2017-02-27",
-            "start": "Mon, 2017-02-27 21:21:52"
+            "start": "2017-08-07T12:27:20"
         }
     ],
     "encryption": {
@@ -336,7 +337,7 @@ Example of a simple archive listing (``borg list --last 1 --json``)::
     },
     "repository": {
         "id": "0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
-        "last_modified": "Mon, 2017-02-27 21:21:58",
+        "last_modified": "2017-08-07T12:27:20",
         "location": "/home/user/repository"
     }
 }
@@ -354,14 +355,14 @@ The same archive with more information (``borg info --last 1 --json``)::
             ],
             "comment": "",
             "duration": 5.641542,
-            "end": "Mon, 2017-02-27 21:21:58",
+            "end": "2017-02-27T12:27:20",
             "hostname": "host",
             "id": "80cd07219ad725b3c5f665c1dcf119435c4dee1647a560ecac30f8d40221a46a",
             "limits": {
                 "max_archive_size": 0.0001330855110409714
             },
             "name": "host-system-backup-2017-02-27",
-            "start": "Mon, 2017-02-27 21:21:52",
+            "start": "2017-02-27T12:27:20",
             "stats": {
                 "compressed_size": 1880961894,
                 "deduplicated_size": 2791,
@@ -387,7 +388,7 @@ The same archive with more information (``borg info --last 1 --json``)::
     },
     "repository": {
         "id": "0cbe6166b46627fd26b97f8831e2ca97584280a46714ef84d2b668daf8271a23",
-        "last_modified": "Mon, 2017-02-27 21:21:58",
+        "last_modified": "2017-08-07T12:27:20",
         "location": "/home/user/repository"
     }
 }
@@ -405,8 +406,8 @@ Refer to the *borg list* documentation for the available keys and their meaning.

 Example (excerpt) of ``borg list --json-lines``::

-    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "Sat, 2016-05-07 19:46:01", "size": 0}
-    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux/baz", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "Sat, 2016-05-07 19:46:01", "size": 0}
+    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "2017-02-27T12:27:20", "size": 0}
+    {"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux/baz", "healthy": true, "source": "", "linktarget": "", "flags": null, "isomtime": "2017-02-27T12:27:20", "size": 0}

 .. _msgid:

@@ -32,7 +32,7 @@
 from .helpers import Error, IntegrityError, set_ec
 from .helpers import uid2user, user2uid, gid2group, group2gid
 from .helpers import parse_timestamp, to_localtime
-from .helpers import format_time, format_timedelta, format_file_size, file_status, FileSize
+from .helpers import OutputTimestamp, format_timedelta, format_file_size, file_status, FileSize
 from .helpers import safe_encode, safe_decode, make_path_safe, remove_surrogates
 from .helpers import StableDict
 from .helpers import bin_to_hex
@@ -381,8 +381,8 @@ def info(self):
         info = {
             'name': self.name,
             'id': self.fpr,
-            'start': format_time(to_localtime(start)),
-            'end': format_time(to_localtime(end)),
+            'start': OutputTimestamp(start),
+            'end': OutputTimestamp(end),
             'duration': (end - start).total_seconds(),
             'stats': stats.as_dict(),
             'limits': {
@@ -411,8 +411,8 @@ def __str__(self):
         Utilization of max. archive size: {csize_max:.0%}
         '''.format(
             self,
-            start=format_time(to_localtime(self.start.replace(tzinfo=timezone.utc))),
-            end=format_time(to_localtime(self.end.replace(tzinfo=timezone.utc))),
+            start=OutputTimestamp(self.start.replace(tzinfo=timezone.utc)),
+            end=OutputTimestamp(self.end.replace(tzinfo=timezone.utc)),
             csize_max=self.cache.chunks[self.id].csize / MAX_DATA_SIZE)

     def __repr__(self):
@@ -744,6 +744,22 @@ def format_timedelta(td):
     return txt


+class OutputTimestamp:
+    def __init__(self, ts: datetime):
+        if ts.tzinfo == timezone.utc:
+            ts = to_localtime(ts)
+        self.ts = ts
+
+    def __format__(self, format_spec):
+        return format_time(self.ts)
+
+    def __str__(self):
+        return '{}'.format(self)
+
+    def to_json(self):
+        return isoformat_time(self.ts)
+
+
 def format_file_size(v, precision=2, sign=False):
     """Format file size into a human friendly format
     """
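For context, a self-contained sketch of how the new class behaves; to_localtime(), format_time() and isoformat_time() are stand-ins whose output formats are assumed from the documentation hunks above, not borg's actual helpers::

    from datetime import datetime, timezone

    def to_localtime(ts):
        # assumed behaviour: convert an aware UTC timestamp to naive local time
        return ts.astimezone().replace(tzinfo=None)

    def format_time(ts):
        return ts.strftime('%a, %Y-%m-%d %H:%M:%S')   # assumed human-readable form

    def isoformat_time(ts):
        return ts.strftime('%Y-%m-%dT%H:%M:%S')       # ISO-8601 form, as documented

    class OutputTimestamp:
        def __init__(self, ts: datetime):
            if ts.tzinfo == timezone.utc:
                ts = to_localtime(ts)
            self.ts = ts

        def __format__(self, format_spec):
            return format_time(self.ts)

        def __str__(self):
            return '{}'.format(self)

        def to_json(self):
            return isoformat_time(self.ts)

    ts = OutputTimestamp(datetime(2017, 8, 7, 12, 27, 20, tzinfo=timezone.utc))
    print('{}'.format(ts))   # human-readable local time, e.g. Mon, 2017-08-07 14:27:20
    print(ts.to_json())      # ISO-8601 local time, e.g. 2017-08-07T14:27:20

One object thus serves both the plain text output (via str.format) and the JSON output (via the encoder hook added further down), which is the point of the commit.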
@@ -1664,12 +1680,11 @@ def __init__(self, format, repository, manifest, key, *, json=False):
         if self.json:
             self.item_data = {}
             self.format_item = self.format_item_json
-            self.format_time = self.format_time_json
         else:
             self.item_data = static_keys

     def format_item_json(self, item):
-        return json.dumps(self.get_item_data(item)) + '\n'
+        return json.dumps(self.get_item_data(item), cls=BorgJsonEncoder) + '\n'

     def get_item_data(self, archive_info):
         self.name = archive_info.name
@@ -1703,12 +1718,7 @@ def get_ts_end(self):
         return self.format_time(self.archive.ts_end)

     def format_time(self, ts):
-        t = to_localtime(ts)
-        return format_time(t)
-
-    def format_time_json(self, ts):
-        t = to_localtime(ts)
-        return isoformat_time(t)
+        return OutputTimestamp(ts)


 class ItemFormatter(BaseFormatter):
@@ -1784,7 +1794,6 @@ def __init__(self, archive, format, *, json_lines=False):
         if self.json_lines:
             self.item_data = {}
             self.format_item = self.format_item_json
-            self.format_time = self.format_time_json
         else:
             self.item_data = static_keys
             self.format = partial_format(format, static_keys)
@@ -1796,19 +1805,19 @@ def __init__(self, archive, format, *, json_lines=False):
             'dcsize': partial(self.sum_unique_chunks_metadata, lambda chunk: chunk.csize),
             'num_chunks': self.calculate_num_chunks,
             'unique_chunks': partial(self.sum_unique_chunks_metadata, lambda chunk: 1),
-            'isomtime': partial(self.format_time, 'mtime'),
-            'isoctime': partial(self.format_time, 'ctime'),
-            'isoatime': partial(self.format_time, 'atime'),
-            'mtime': partial(self.time, 'mtime'),
-            'ctime': partial(self.time, 'ctime'),
-            'atime': partial(self.time, 'atime'),
+            'isomtime': partial(self.format_iso_time, 'mtime'),
+            'isoctime': partial(self.format_iso_time, 'ctime'),
+            'isoatime': partial(self.format_iso_time, 'atime'),
+            'mtime': partial(self.format_time, 'mtime'),
+            'ctime': partial(self.format_time, 'ctime'),
+            'atime': partial(self.format_time, 'atime'),
         }
         for hash_function in hashlib.algorithms_guaranteed:
             self.add_key(hash_function, partial(self.hash_item, hash_function))
         self.used_call_keys = set(self.call_keys) & self.format_keys

     def format_item_json(self, item):
-        return json.dumps(self.get_item_data(item)) + '\n'
+        return json.dumps(self.get_item_data(item), cls=BorgJsonEncoder) + '\n'

     def add_key(self, key, callable_with_item):
         self.call_keys[key] = callable_with_item
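The table above maps every output key to a callable that only needs the item, built with functools.partial. A simplified standalone sketch of that dispatch pattern (names are placeholders, not borg's classes)::

    from functools import partial

    class SketchFormatter:
        def __init__(self):
            self.call_keys = {
                'mtime': partial(self.format_time, 'mtime'),
                'isomtime': partial(self.format_iso_time, 'mtime'),
            }

        def format_time(self, key, item):
            return item[key]                    # stand-in for OutputTimestamp(safe_timestamp(...))

        def format_iso_time(self, key, item):
            return self.format_time(key, item)  # stand-in for self.format_time(key, item).to_json()

    item = {'mtime': '2017-02-27T12:27:20'}
    f = SketchFormatter()
    print(f.call_keys['isomtime'](item))  # -> 2017-02-27T12:27:20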
@@ -1883,15 +1892,10 @@ def hash_item(self, hash_function, item):
         return hash.hexdigest()

     def format_time(self, key, item):
-        t = self.time(key, item)
-        return format_time(t)
+        return OutputTimestamp(safe_timestamp(item.get(key) or item.mtime))

-    def format_time_json(self, key, item):
-        t = self.time(key, item)
-        return isoformat_time(t)
-
-    def time(self, key, item):
-        return safe_timestamp(item.get(key) or item.mtime)
+    def format_iso_time(self, key, item):
+        return self.format_time(key, item).to_json()


 class ChunkIteratorFileWrapper:
@@ -2204,6 +2208,8 @@ def default(self, o):
             return {
                 'stats': o.stats(),
             }
+        if callable(getattr(o, 'to_json', None)):
+            return o.to_json()
         return super().default(o)
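The hook added above serializes any object that exposes a callable to_json() method, which is what lets OutputTimestamp flow straight through json.dumps(). A minimal standalone sketch of that mechanism (Stamp is a hypothetical stand-in, not borg code)::

    import json
    from datetime import datetime

    class SketchEncoder(json.JSONEncoder):
        def default(self, o):
            # same duck-typed hook as in the hunk above
            if callable(getattr(o, 'to_json', None)):
                return o.to_json()
            return super().default(o)

    class Stamp:
        def __init__(self, ts):
            self.ts = ts

        def to_json(self):
            return self.ts.strftime('%Y-%m-%dT%H:%M:%S')

    print(json.dumps({'start': Stamp(datetime(2017, 8, 7, 12, 27, 20))}, cls=SketchEncoder))
    # -> {"start": "2017-08-07T12:27:20"}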
@@ -2216,7 +2222,7 @@ def basic_json_data(manifest, *, cache=None, extra=None):
             'mode': key.ARG_NAME,
         },
     })
-    data['repository']['last_modified'] = isoformat_time(to_localtime(manifest.last_timestamp.replace(tzinfo=timezone.utc)))
+    data['repository']['last_modified'] = OutputTimestamp(manifest.last_timestamp.replace(tzinfo=timezone.utc))
     if key.NAME.startswith('key file'):
         data['encryption']['keyfile'] = key.find_key()
     if cache:
@@ -1325,6 +1325,8 @@ def test_info_json(self):
         assert isinstance(archive['duration'], float)
         assert len(archive['id']) == 64
         assert 'stats' in archive
+        assert datetime.strptime(archive['start'], ISO_FORMAT)
+        assert datetime.strptime(archive['end'], ISO_FORMAT)

     def test_comment(self):
         self.create_regular_file('file1', size=1024 * 80)
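The two new assertions double as a format check, since datetime.strptime() raises ValueError when the string does not match the pattern. Assuming ISO_FORMAT in the test suite equals the documented '%Y-%m-%dT%H:%M:%S' (an assumption, the constant's definition is not part of this excerpt), the check amounts to::

    from datetime import datetime

    ISO_FORMAT = '%Y-%m-%dT%H:%M:%S'   # assumed value of the test suite constant
    assert datetime.strptime('2017-08-07T12:27:20', ISO_FORMAT)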