
Revert "don't do "bigint" conversion for nanosecond mtime"

This reverts commit 8b2e7ec680.

We still need the bigint stuff for borg 1.0 compatibility.

# Conflicts:
#	src/borg/cache.py
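(For context: "bigint" here refers to the helper pair used for borg 1.0 compatibility. msgpack cannot encode integers wider than 64 bits, so nanosecond timestamps are packed into a signed little-endian byte string once they no longer fit into 63 bits. Below is a minimal sketch of that behaviour, assuming it matches the 1.0-era helpers; it is an illustration, not a copy of the real code.)

def int_to_bigint(value):
    # Sketch (assumption): ints that fit a signed 64-bit slot stay plain ints,
    # anything wider becomes a signed little-endian byte string.
    if value.bit_length() > 63:
        return value.to_bytes((value.bit_length() + 9) // 8, 'little', signed=True)
    return value


def bigint_to_int(value):
    # Sketch (assumption): accept both representations; decode bytes, pass ints through.
    if isinstance(value, bytes):
        return int.from_bytes(value, 'little', signed=True)
    return value


# Consistent with the test added at the bottom of this commit:
assert int_to_bigint(42) == 42
assert int_to_bigint(2 ** 65) == b'\0' * 8 + b'\x02'
assert bigint_to_int(b'\0' * 8 + b'\x02') == 2 ** 65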
Thomas Waldmann, 2017-03-21 02:21:32 +01:00
parent 04dba76fc9
commit f708183743
3 changed files with 19 additions and 7 deletions

src/borg/cache.py

@@ -15,7 +15,7 @@
 from .helpers import Location
 from .helpers import Error
 from .helpers import get_cache_dir, get_security_dir
-from .helpers import bin_to_hex
+from .helpers import int_to_bigint, bigint_to_int, bin_to_hex
 from .helpers import format_file_size
 from .helpers import safe_ns
 from .helpers import yes
@@ -354,7 +354,7 @@ def commit(self):
                     # this is to avoid issues with filesystem snapshots and mtime granularity.
                     # Also keep files from older backups that have not reached BORG_FILES_CACHE_TTL yet.
                     entry = FileCacheEntry(*msgpack.unpackb(item))
-                    if entry.age == 0 and entry.mtime < self._newest_mtime or \
+                    if entry.age == 0 and bigint_to_int(entry.mtime) < self._newest_mtime or \
                        entry.age > 0 and entry.age < ttl:
                         msgpack.pack((path_hash, entry), fd)
         pi.output('Saving cache config')
@@ -574,7 +574,7 @@ def file_known_and_unchanged(self, path_hash, st, ignore_inode=False):
         if not entry:
             return None
         entry = FileCacheEntry(*msgpack.unpackb(entry))
-        if (entry.size == st.st_size and entry.mtime == st.st_mtime_ns and
+        if (entry.size == st.st_size and bigint_to_int(entry.mtime) == st.st_mtime_ns and
                 (ignore_inode or entry.inode == st.st_ino)):
             # we ignored the inode number in the comparison above or it is still same.
             # if it is still the same, replacing it in the tuple doesn't change it.
@@ -593,6 +593,6 @@ def memorize_file(self, path_hash, st, ids):
         if not (self.do_files and stat.S_ISREG(st.st_mode)):
            return
         mtime_ns = safe_ns(st.st_mtime_ns)
-        entry = FileCacheEntry(age=0, inode=st.st_ino, size=st.st_size, mtime=mtime_ns, chunk_ids=ids)
+        entry = FileCacheEntry(age=0, inode=st.st_ino, size=st.st_size, mtime=int_to_bigint(mtime_ns), chunk_ids=ids)
         self.files[path_hash] = msgpack.packb(entry)
         self._newest_mtime = max(self._newest_mtime or 0, mtime_ns)

src/borg/item.py

@@ -2,6 +2,7 @@ from collections import namedtuple
 
 from .constants import ITEM_KEYS
 from .helpers import safe_encode, safe_decode
+from .helpers import bigint_to_int, int_to_bigint
 from .helpers import StableDict
 
 API_VERSION = '1.1_02'
@@ -156,9 +157,9 @@ class Item(PropDict):
     rdev = PropDict._make_property('rdev', int)
     bsdflags = PropDict._make_property('bsdflags', int)
 
-    atime = PropDict._make_property('atime', int)
-    ctime = PropDict._make_property('ctime', int)
-    mtime = PropDict._make_property('mtime', int)
+    atime = PropDict._make_property('atime', int, 'bigint', encode=int_to_bigint, decode=bigint_to_int)
+    ctime = PropDict._make_property('ctime', int, 'bigint', encode=int_to_bigint, decode=bigint_to_int)
+    mtime = PropDict._make_property('mtime', int, 'bigint', encode=int_to_bigint, decode=bigint_to_int)
 
     # size is only present for items with a chunk list and then it is sum(chunk_sizes)
     # compatibility note: this is a new feature, in old archives size will be missing.

src/borg/testsuite/item.py

@@ -77,6 +77,17 @@ def test_item_int_property():
         item.mode = "invalid"
 
 
+def test_item_bigint_property():
+    item = Item()
+    small, big = 42, 2 ** 65
+    item.atime = small
+    assert item.atime == small
+    assert item.as_dict() == {'atime': small}
+    item.atime = big
+    assert item.atime == big
+    assert item.as_dict() == {'atime': b'\0' * 8 + b'\x02'}
+
+
 def test_item_user_group_none():
     item = Item()
     item.user = None
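(Sanity check for the expected byte string in test_item_bigint_property above: it is simply 2 ** 65 encoded as a signed little-endian byte string, assuming the "bigint" packing sketched near the top of this page.)

assert (2 ** 65).to_bytes(9, 'little', signed=True) == b'\0' * 8 + b'\x02'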