borgbackup/borg (https://github.com/borgbackup/borg.git)

Merge pull request #2998 from ThomasWaldmann/fix-2994
fix .isoformat() issues

Commit 67cb76809a
7 changed files with 27 additions and 19 deletions
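
Background for this fix, as a minimal standalone sketch (the two format constants introduced by this PR are inlined here): datetime.isoformat() drops the fractional part entirely whenever microsecond == 0, so code that later parses the stored string with a fixed '.%f' format raises ValueError, while strftime(ISO_FORMAT) always emits exactly six fractional digits.

from datetime import datetime

# inlined copies of the constants this PR adds to constants.py
ISO_FORMAT_NO_USECS = '%Y-%m-%dT%H:%M:%S'
ISO_FORMAT = ISO_FORMAT_NO_USECS + '.%f'

dt = datetime(2017, 10, 22, 21, 45, 42)        # microsecond == 0
print(dt.isoformat())                          # 2017-10-22T21:45:42   <- no fractional part
print(dt.strftime(ISO_FORMAT))                 # 2017-10-22T21:45:42.000000

# parsing the isoformat() output with the fixed microseconds format fails:
try:
    datetime.strptime(dt.isoformat(), ISO_FORMAT)
except ValueError as exc:
    print(exc)                                 # time data ... does not match format ...
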
@@ -461,8 +461,8 @@ def save(self, name=None, comment=None, timestamp=None, additional_metadata=None
             'cmdline': sys.argv,
             'hostname': socket.gethostname(),
             'username': getuser(),
-            'time': start.isoformat(),
-            'time_end': end.isoformat(),
+            'time': start.strftime(ISO_FORMAT),
+            'time_end': end.strftime(ISO_FORMAT),
             'chunker_params': self.chunker_params,
         }
         metadata.update(additional_metadata or {})
@@ -66,6 +66,12 @@
 EXIT_WARNING = 1  # reached normal end of operation, but there were issues
 EXIT_ERROR = 2  # terminated abruptly, did not reach end of operation
 
+# never use datetime.isoformat(), it is evil. always use one of these:
+# datetime.strftime(ISO_FORMAT)  # output always includes .microseconds
+# datetime.strftime(ISO_FORMAT_NO_USECS)  # output never includes microseconds
+ISO_FORMAT_NO_USECS = '%Y-%m-%dT%H:%M:%S'
+ISO_FORMAT = ISO_FORMAT_NO_USECS + '.%f'
+
 DASHES = '-' * 78
 
 PBKDF2_ITERATIONS = 100000
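
The two constants give callers an explicit choice between the two precisions and make the output fixed-width and round-trippable. A quick sketch of the intended usage (standalone, constants redefined locally):

from datetime import datetime

ISO_FORMAT_NO_USECS = '%Y-%m-%dT%H:%M:%S'
ISO_FORMAT = ISO_FORMAT_NO_USECS + '.%f'

now = datetime.utcnow()
s = now.strftime(ISO_FORMAT)                    # always 26 characters, e.g. 2017-10-22T21:45:42.000123
assert len(s) == 26
assert datetime.strptime(s, ISO_FORMAT) == now  # lossless round trip at microsecond precision
print(now.strftime(ISO_FORMAT_NO_USECS))        # 2017-10-22T21:45:42 (seconds precision, still fixed width)
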
@@ -64,7 +64,7 @@ def __setitem__(self, name, info):
         id, ts = info
         assert isinstance(id, bytes)
         if isinstance(ts, datetime):
-            ts = ts.replace(tzinfo=None).isoformat()
+            ts = ts.replace(tzinfo=None).strftime(ISO_FORMAT)
         assert isinstance(ts, str)
         ts = ts.encode()
         self._archives[name] = {b'id': id, b'time': ts}
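
With this change, a datetime passed into the setter is stored as a naive, fixed-width string. A rough sketch of the value that ends up in the archives dict (ISO_FORMAT inlined, variable names hypothetical):

from datetime import datetime, timezone

ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'

ts = datetime(2017, 10, 22, 21, 45, 42, tzinfo=timezone.utc)   # hypothetical archive timestamp
stored = ts.replace(tzinfo=None).strftime(ISO_FORMAT).encode()
print(stored)                                   # b'2017-10-22T21:45:42.000000'
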
@@ -166,7 +166,7 @@ def id_str(self):
 
     @property
     def last_timestamp(self):
-        return datetime.strptime(self.timestamp, "%Y-%m-%dT%H:%M:%S.%f")
+        return parse_timestamp(self.timestamp, tzinfo=None)
 
     @classmethod
     def load(cls, repository, operations, key=None, force_tam_not_required=False):
@@ -236,11 +236,11 @@ def write(self):
             self.config[b'tam_required'] = True
         # self.timestamp needs to be strictly monotonically increasing. Clocks often are not set correctly
         if self.timestamp is None:
-            self.timestamp = datetime.utcnow().isoformat()
+            self.timestamp = datetime.utcnow().strftime(ISO_FORMAT)
         else:
             prev_ts = self.last_timestamp
-            incremented = (prev_ts + timedelta(microseconds=1)).isoformat()
-            self.timestamp = max(incremented, datetime.utcnow().isoformat())
+            incremented = (prev_ts + timedelta(microseconds=1)).strftime(ISO_FORMAT)
+            self.timestamp = max(incremented, datetime.utcnow().strftime(ISO_FORMAT))
         # include checks for limits as enforced by limited unpacker (used by load())
         assert len(self.archives) <= MAX_ARCHIVES
         assert all(len(name) <= 255 for name in self.archives)
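
The manifest timestamp is kept as a string and advanced with a string max(); the point of the fixed-width format here appears to be that every stored value then has the same length, so lexicographic order matches chronological order and last_timestamp can always parse what write() produced. A rough illustration (constants inlined, values hypothetical):

from datetime import datetime, timedelta

ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'

prev_ts = datetime(2017, 10, 22, 21, 45, 42)                       # hypothetical last_timestamp
incremented = (prev_ts + timedelta(microseconds=1)).strftime(ISO_FORMAT)
now = datetime.utcnow().strftime(ISO_FORMAT)
new_timestamp = max(incremented, now)                              # string max(), both operands fixed-width
print(new_timestamp > prev_ts.strftime(ISO_FORMAT))                # True: strictly increasing
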
@@ -137,7 +137,7 @@ def __init__(self, dt):
 
     def __format__(self, format_spec):
         if format_spec == '':
-            format_spec = '%Y-%m-%dT%H:%M:%S'
+            format_spec = ISO_FORMAT_NO_USECS
         return self.dt.__format__(format_spec)
 
 
@@ -2,18 +2,21 @@
 import time
 from datetime import datetime, timezone
 
+from ..constants import ISO_FORMAT, ISO_FORMAT_NO_USECS
+
 
 def to_localtime(ts):
     """Convert datetime object from UTC to local time zone"""
     return datetime(*time.localtime((ts - datetime(1970, 1, 1, tzinfo=timezone.utc)).total_seconds())[:6])
 
 
-def parse_timestamp(timestamp):
+def parse_timestamp(timestamp, tzinfo=timezone.utc):
     """Parse a ISO 8601 timestamp string"""
-    if '.' in timestamp:  # microseconds might not be present
-        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S.%f').replace(tzinfo=timezone.utc)
-    else:
-        return datetime.strptime(timestamp, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc)
+    fmt = ISO_FORMAT if '.' in timestamp else ISO_FORMAT_NO_USECS
+    dt = datetime.strptime(timestamp, fmt)
+    if tzinfo is not None:
+        dt = dt.replace(tzinfo=tzinfo)
+    return dt
 
 
 def timestamp(s):
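
The reworked parse_timestamp accepts both string shapes and lets callers opt out of attaching a timezone. A standalone copy (constants inlined) behaves like this:

from datetime import datetime, timezone

ISO_FORMAT_NO_USECS = '%Y-%m-%dT%H:%M:%S'
ISO_FORMAT = ISO_FORMAT_NO_USECS + '.%f'

def parse_timestamp(timestamp, tzinfo=timezone.utc):
    """Parse a ISO 8601 timestamp string"""
    fmt = ISO_FORMAT if '.' in timestamp else ISO_FORMAT_NO_USECS
    dt = datetime.strptime(timestamp, fmt)
    if tzinfo is not None:
        dt = dt.replace(tzinfo=tzinfo)
    return dt

print(parse_timestamp('2017-10-22T21:45:42'))                       # aware, UTC, microseconds default to 0
print(parse_timestamp('2017-10-22T21:45:42.000001', tzinfo=None))   # naive, as used by last_timestamp above
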
@@ -98,7 +101,7 @@ def isoformat_time(ts: datetime):
     Format *ts* according to ISO 8601.
     """
     # note: first make all datetime objects tz aware before adding %z here.
-    return ts.strftime('%Y-%m-%dT%H:%M:%S.%f')
+    return ts.strftime(ISO_FORMAT)
 
 
 def format_timedelta(td):
@@ -540,7 +540,8 @@ def rename_tmp(file):
         # Log transaction in append-only mode
         if self.append_only:
             with open(os.path.join(self.path, 'transactions'), 'a') as log:
-                print('transaction %d, UTC time %s' % (transaction_id, datetime.utcnow().isoformat()), file=log)
+                print('transaction %d, UTC time %s' % (
+                      transaction_id, datetime.utcnow().strftime(ISO_FORMAT)), file=log)
 
         # Write hints file
         hints_name = 'hints.%d' % transaction_id
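
The reflowed print still writes one log line per commit, now with a fixed-width UTC timestamp. Roughly (transaction_id is a hypothetical value, and this prints to stdout instead of the transactions file):

from datetime import datetime

ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
transaction_id = 7                              # hypothetical

print('transaction %d, UTC time %s' % (
      transaction_id, datetime.utcnow().strftime(ISO_FORMAT)))
# transaction 7, UTC time 2017-10-22T21:45:42.000123
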
@@ -61,8 +61,6 @@
 
 src_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
 
-ISO_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
-
 
 def exec_cmd(*args, archiver=None, fork=False, exe=None, input=b'', binary_output=False, **kw):
     if fork:
@@ -3037,7 +3035,7 @@ def spoof_manifest(self, repository):
                 'version': 1,
                 'archives': {},
                 'config': {},
-                'timestamp': (datetime.utcnow() + timedelta(days=1)).isoformat(),
+                'timestamp': (datetime.utcnow() + timedelta(days=1)).strftime(ISO_FORMAT),
             })))
             repository.commit()
 
@@ -3049,7 +3047,7 @@ def test_fresh_init_tam_required(self):
             repository.put(Manifest.MANIFEST_ID, key.encrypt(msgpack.packb({
                 'version': 1,
                 'archives': {},
-                'timestamp': (datetime.utcnow() + timedelta(days=1)).isoformat(),
+                'timestamp': (datetime.utcnow() + timedelta(days=1)).strftime(ISO_FORMAT),
             })))
             repository.commit()
 