mirror of https://github.com/borgbackup/borg.git
clamp (nano)second values to unproblematic range, fixes #2304
filesystem -> clamp -> archive (create)
(cherry picked from commit b7a17a6db7)
This commit is contained in:
parent
d1858ff845
commit
07bcd29144
|
@ -19,7 +19,7 @@ from . import xattr
|
|||
from .helpers import Error, uid2user, user2uid, gid2group, group2gid, bin_to_hex, \
|
||||
parse_timestamp, to_localtime, format_time, format_timedelta, remove_surrogates, \
|
||||
Manifest, Statistics, decode_dict, make_path_safe, StableDict, int_to_bigint, bigint_to_int, \
|
||||
ProgressIndicatorPercent, IntegrityError, set_ec, EXIT_WARNING
|
||||
ProgressIndicatorPercent, IntegrityError, set_ec, EXIT_WARNING, safe_ns
|
||||
from .platform import acl_get, acl_set
|
||||
from .chunker import Chunker
|
||||
from .hashindex import ChunkIndex
|
||||
|
@ -588,15 +588,15 @@ Number of files: {0.stats.nfiles}'''.format(
|
|||
b'mode': st.st_mode,
|
||||
b'uid': st.st_uid, b'user': uid2user(st.st_uid),
|
||||
b'gid': st.st_gid, b'group': gid2group(st.st_gid),
|
||||
b'mtime': int_to_bigint(st.st_mtime_ns),
|
||||
b'mtime': int_to_bigint(safe_ns(st.st_mtime_ns)),
|
||||
}
|
||||
# borg can work with archives only having mtime (older attic archives do not have
|
||||
# atime/ctime). it can be useful to omit atime/ctime, if they change without the
|
||||
# file content changing - e.g. to get better metadata deduplication.
|
||||
if not self.noatime:
|
||||
item[b'atime'] = int_to_bigint(st.st_atime_ns)
|
||||
item[b'atime'] = int_to_bigint(safe_ns(st.st_atime_ns))
|
||||
if not self.noctime:
|
||||
item[b'ctime'] = int_to_bigint(st.st_ctime_ns)
|
||||
item[b'ctime'] = int_to_bigint(safe_ns(st.st_ctime_ns))
|
||||
if self.numeric_owner:
|
||||
item[b'user'] = item[b'group'] = None
|
||||
with backup_io():
|
||||
|
|
|
@ -10,7 +10,7 @@ from .key import PlaintextKey
|
|||
from .logger import create_logger
|
||||
logger = create_logger()
|
||||
from .helpers import Error, get_cache_dir, decode_dict, int_to_bigint, \
|
||||
bigint_to_int, format_file_size, yes, bin_to_hex, Location
|
||||
bigint_to_int, format_file_size, yes, bin_to_hex, Location, safe_ns
|
||||
from .locking import Lock
|
||||
from .hashindex import ChunkIndex
|
||||
|
||||
|
@ -461,6 +461,6 @@ Chunk index: {0.total_unique_chunks:20d} {0.total_chunks:20d}"""
|
|||
if not (self.do_files and stat.S_ISREG(st.st_mode)):
|
||||
return
|
||||
# Entry: Age, inode, size, mtime, chunk ids
|
||||
mtime_ns = st.st_mtime_ns
|
||||
mtime_ns = safe_ns(st.st_mtime_ns)
|
||||
self.files[path_hash] = msgpack.packb((0, st.st_ino, st.st_size, int_to_bigint(mtime_ns), ids))
|
||||
self._newest_mtime = max(self._newest_mtime or 0, mtime_ns)
|
||||
|
|
|
@ -617,7 +617,7 @@ def timestamp(s):
|
|||
"""Convert a --timestamp=s argument to a datetime object"""
|
||||
try:
|
||||
# is it pointing to a file / directory?
|
||||
ts = os.stat(s).st_mtime
|
||||
ts = safe_s(os.stat(s).st_mtime)
|
||||
return datetime.utcfromtimestamp(ts)
|
||||
except OSError:
|
||||
# didn't work, try parsing as timestamp. UTC, no TZ, no microsecs support.
|
||||
|
@ -728,12 +728,34 @@ def replace_placeholders(text):
|
|||
return format_line(text, data)
|
||||
|
||||
|
||||
# Filesystems sometimes hand us garbage timestamps that overflow later computations.
# Since such values are meaningless anyway, nothing is lost by clamping them into a
# safe range: msgpack can only encode uint64, and datetime cannot go past year 9999.
MAX_NS = 18446744073000000000  # below 2**64 - 1 ns and also before year 10000
MAX_S = MAX_NS // 10 ** 9
|
||||
|
||||
|
||||
def safe_s(ts):
    """Clamp a seconds timestamp into the safe range [0, MAX_S].

    Negative values become 0; values beyond MAX_S become MAX_S; everything
    in between is returned unchanged.
    """
    if ts < 0:
        return 0
    if ts > MAX_S:
        return MAX_S
    return ts
|
||||
|
||||
|
||||
def safe_ns(ts):
    """Clamp a nanoseconds timestamp into the safe range [0, MAX_NS].

    Equivalent to returning ts unchanged when 0 <= ts <= MAX_NS, 0 for
    negative input, and MAX_NS for anything larger.
    """
    return min(max(ts, 0), MAX_NS)
|
||||
|
||||
|
||||
def safe_timestamp(item_timestamp_ns):
    """Convert a bigint-encoded nanosecond timestamp to a local datetime.

    The value is first clamped via safe_ns so that broken filesystem
    timestamps cannot overflow datetime (year 9999 limit).
    """
    clamped_ns = safe_ns(bigint_to_int(item_timestamp_ns))
    return datetime.fromtimestamp(clamped_ns / 1e9)
|
||||
|
||||
|
||||
def format_time(t):
|
||||
|
|
|
@ -17,7 +17,8 @@ from ..helpers import Location, format_file_size, format_timedelta, format_line,
|
|||
StableDict, int_to_bigint, bigint_to_int, parse_timestamp, CompressionSpec, ChunkerParams, \
|
||||
ProgressIndicatorPercent, ProgressIndicatorEndless, parse_pattern, load_exclude_file, load_pattern_file, \
|
||||
PatternMatcher, RegexPattern, PathPrefixPattern, FnmatchPattern, ShellPattern, \
|
||||
Buffer
|
||||
Buffer, safe_ns, safe_s
|
||||
|
||||
from . import BaseTestCase, FakeInputs
|
||||
|
||||
|
||||
|
@ -1115,3 +1116,18 @@ def test_format_line_erroneous():
|
|||
assert format_line('{invalid}', data)
|
||||
with pytest.raises(PlaceholderError):
|
||||
assert format_line('{}', data)
|
||||
|
||||
|
||||
def test_safe_timestamps():
    """safe_s / safe_ns keep timestamps inside uint64-ns and pre-y10k bounds."""
    # negative input clamps to zero for both variants
    assert safe_ns(-1) == 0
    assert safe_s(-1) == 0
    # nanosecond values must stay representable as uint64
    assert safe_ns(2 ** 64) < 2 ** 64
    # second values must still fit uint64 after conversion to nanoseconds
    assert safe_s(2 ** 64) * 1000000000 < 2 ** 64
    # datetime must not trip over its year-10000 limit
    far_future = 2 ** 100
    with pytest.raises(OverflowError):
        datetime.utcfromtimestamp(far_future)
    assert datetime.utcfromtimestamp(safe_s(far_future)) > datetime(2500, 12, 31)
    assert datetime.utcfromtimestamp(safe_ns(far_future) / 1000000000) > datetime(2500, 12, 31)
|
||||
|
|
Loading…
Reference in New Issue