mirror of https://github.com/borgbackup/borg.git
Item: symlinks: rename .source to .target, fixes #7245
Also, in JSON: rename "linktarget" to "target" for symlinks; remove "source" for symlinks.
This commit is contained in:
parent
1df7c244c4
commit
1672aee031
|
@ -472,8 +472,9 @@ Refer to the *borg list* documentation for the available keys and their meaning.
|
|||
|
||||
Example (excerpt) of ``borg list --json-lines``::
|
||||
|
||||
{"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux", "healthy": true, "source": "", "linktarget": "", "flags": null, "mtime": "2017-02-27T12:27:20.023407", "size": 0}
|
||||
{"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux/baz", "healthy": true, "source": "", "linktarget": "", "flags": null, "mtime": "2017-02-27T12:27:20.585407", "size": 0}
|
||||
{"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux", "healthy": true, "target": "", "flags": null, "mtime": "2017-02-27T12:27:20.023407", "size": 0}
|
||||
{"type": "d", "mode": "drwxr-xr-x", "user": "user", "group": "user", "uid": 1000, "gid": 1000, "path": "linux/baz", "healthy": true, "target": "", "flags": null, "mtime": "2017-02-27T12:27:20.585407", "size": 0}
|
||||
|
||||
|
||||
Archive Differencing
|
||||
++++++++++++++++++++
|
||||
|
|
|
@ -898,11 +898,11 @@ Duration: {0.duration}
|
|||
if hardlink_set:
|
||||
# unusual, but possible: this is a hardlinked symlink.
|
||||
return
|
||||
source = item.source
|
||||
target = item.target
|
||||
try:
|
||||
os.symlink(source, path)
|
||||
os.symlink(target, path)
|
||||
except UnicodeEncodeError:
|
||||
raise self.IncompatibleFilesystemEncodingError(source, sys.getfilesystemencoding()) from None
|
||||
raise self.IncompatibleFilesystemEncodingError(target, sys.getfilesystemencoding()) from None
|
||||
self.restore_attrs(path, item, symlink=True)
|
||||
elif stat.S_ISFIFO(mode):
|
||||
make_parent(path)
|
||||
|
@ -1397,8 +1397,8 @@ class FilesystemObjectProcessors:
|
|||
with self.create_helper(path, st, "s", hardlinkable=True) as (item, status, hardlinked):
|
||||
fname = name if name is not None and parent_fd is not None else path
|
||||
with backup_io("readlink"):
|
||||
source = os.readlink(fname, dir_fd=parent_fd)
|
||||
item.source = source
|
||||
target = os.readlink(fname, dir_fd=parent_fd)
|
||||
item.target = target
|
||||
item.update(self.metadata_collector.stat_attrs(st, path)) # can't use FD here?
|
||||
return status
|
||||
|
||||
|
@ -1589,7 +1589,7 @@ class TarfileObjectProcessors:
|
|||
|
||||
def process_symlink(self, *, tarinfo, status, type):
|
||||
with self.create_helper(tarinfo, status, type) as (item, status):
|
||||
item.source = tarinfo.linkname
|
||||
item.target = tarinfo.linkname
|
||||
return status
|
||||
|
||||
def process_hardlink(self, *, tarinfo, status, type):
|
||||
|
|
|
@ -163,7 +163,7 @@ class TarMixIn:
|
|||
tarinfo.type = tarfile.DIRTYPE
|
||||
elif modebits == stat.S_IFLNK:
|
||||
tarinfo.type = tarfile.SYMTYPE
|
||||
tarinfo.linkname = item.source
|
||||
tarinfo.linkname = item.target
|
||||
elif modebits == stat.S_IFBLK:
|
||||
tarinfo.type = tarfile.BLKTYPE
|
||||
tarinfo.devmajor = os.major(item.rdev)
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# this set must be kept complete, otherwise the RobustUnpacker might malfunction:
|
||||
# fmt: off
|
||||
ITEM_KEYS = frozenset(['path', 'source', 'rdev', 'chunks', 'chunks_healthy', 'hardlink_master', 'hlid',
|
||||
ITEM_KEYS = frozenset(['path', 'source', 'target', 'rdev', 'chunks', 'chunks_healthy', 'hardlink_master', 'hlid',
|
||||
'mode', 'user', 'group', 'uid', 'gid', 'mtime', 'atime', 'ctime', 'birthtime', 'size',
|
||||
'xattrs', 'bsdflags', 'acl_nfs4', 'acl_access', 'acl_default', 'acl_extended',
|
||||
'part'])
|
||||
|
|
|
@ -720,4 +720,4 @@ class FuseOperations(llfuse.Operations, FuseBackend):
|
|||
@async_wrapper
|
||||
def readlink(self, inode, ctx=None):
|
||||
item = self.get_item(inode)
|
||||
return os.fsencode(item.source)
|
||||
return os.fsencode(item.target)
|
||||
|
|
|
@ -782,9 +782,9 @@ class ItemFormatter(BaseFormatter):
|
|||
hash_algorithms = set(hashlib.algorithms_guaranteed).union({"xxh64"}).difference({"shake_128", "shake_256"})
|
||||
KEY_DESCRIPTIONS = {
|
||||
"path": "file path",
|
||||
"source": "link target for symlinks (identical to linktarget)",
|
||||
"target": "link target for symlinks",
|
||||
"hlid": "hard link identity (same if hardlinking same fs object)",
|
||||
"extra": 'prepends {source} with " -> " for soft links and " link to " for hard links',
|
||||
"extra": 'prepends {target} with " -> " for soft links and " link to " for hard links',
|
||||
"dsize": "deduplicated size",
|
||||
"num_chunks": "number of chunks in this file",
|
||||
"unique_chunks": "number of unique chunks in this file",
|
||||
|
@ -792,7 +792,7 @@ class ItemFormatter(BaseFormatter):
|
|||
"health": 'either "healthy" (file ok) or "broken" (if file has all-zero replacement chunks)',
|
||||
}
|
||||
KEY_GROUPS = (
|
||||
("type", "mode", "uid", "gid", "user", "group", "path", "source", "linktarget", "hlid", "flags"),
|
||||
("type", "mode", "uid", "gid", "user", "group", "path", "target", "hlid", "flags"),
|
||||
("size", "dsize", "num_chunks", "unique_chunks"),
|
||||
("mtime", "ctime", "atime", "isomtime", "isoctime", "isoatime"),
|
||||
tuple(sorted(hash_algorithms)),
|
||||
|
@ -878,11 +878,10 @@ class ItemFormatter(BaseFormatter):
|
|||
item_data.update(self.item_data)
|
||||
|
||||
item_data.update(text_to_json("path", item.path))
|
||||
source = item.get("source", "")
|
||||
item_data.update(text_to_json("source", source))
|
||||
item_data.update(text_to_json("linktarget", source))
|
||||
target = item.get("target", "")
|
||||
item_data.update(text_to_json("target", target))
|
||||
if not self.json_lines:
|
||||
item_data["extra"] = "" if not source else f" -> {item_data['source']}"
|
||||
item_data["extra"] = "" if not target else f" -> {item_data['target']}"
|
||||
|
||||
hlid = item.get("hlid")
|
||||
hlid = bin_to_hex(hlid) if hlid else ""
|
||||
|
|
|
@ -262,7 +262,8 @@ cdef class Item(PropDict):
|
|||
# properties statically defined, so that IDEs can know their names:
|
||||
|
||||
path = PropDictProperty(str, 'surrogate-escaped str')
|
||||
source = PropDictProperty(str, 'surrogate-escaped str')
|
||||
source = PropDictProperty(str, 'surrogate-escaped str') # legacy borg 1.x. borg 2: see .target
|
||||
target = PropDictProperty(str, 'surrogate-escaped str')
|
||||
user = PropDictProperty(str, 'surrogate-escaped str')
|
||||
group = PropDictProperty(str, 'surrogate-escaped str')
|
||||
|
||||
|
@ -315,7 +316,9 @@ cdef class Item(PropDict):
|
|||
except AttributeError:
|
||||
if stat.S_ISLNK(self.mode):
|
||||
# get out of here quickly. symlinks have no own chunks, their fs size is the length of the target name.
|
||||
return len(self.source)
|
||||
if 'source' in self: # legacy borg 1.x archives
|
||||
return len(self.source)
|
||||
return len(self.target)
|
||||
# no precomputed (c)size value available, compute it:
|
||||
try:
|
||||
chunks = getattr(self, 'chunks')
|
||||
|
@ -383,7 +386,7 @@ cdef class Item(PropDict):
|
|||
# borg 1 stored some "not known" values with a None value.
|
||||
# borg 2 policy for such cases is to just not have the key/value pair.
|
||||
continue
|
||||
if k in ('path', 'source', 'user', 'group'):
|
||||
if k in ('path', 'source', 'target', 'user', 'group'):
|
||||
v = fix_str_value(d, k)
|
||||
if k in ('chunks', 'chunks_healthy'):
|
||||
v = fix_list_of_chunkentries(v)
|
||||
|
@ -665,15 +668,15 @@ class ItemDiff:
|
|||
if self._item1.get('deleted') and self._item2.get('deleted'):
|
||||
return True
|
||||
|
||||
attr_list = ['deleted', 'mode', 'source']
|
||||
attr_list = ['deleted', 'mode', 'target']
|
||||
attr_list += ['uid', 'gid'] if self._numeric_ids else ['user', 'group']
|
||||
for attr in attr_list:
|
||||
if self._item1.get(attr) != self._item2.get(attr):
|
||||
return False
|
||||
|
||||
if 'mode' in self._item1: # mode of item1 and item2 is equal
|
||||
if (self._item1.is_link() and 'source' in self._item1 and 'source' in self._item2
|
||||
and self._item1.source != self._item2.source):
|
||||
if (self._item1.is_link() and 'target' in self._item1 and 'target' in self._item2
|
||||
and self._item1.target != self._item2.target):
|
||||
return False
|
||||
|
||||
if 'chunks' in self._item1 and 'chunks' in self._item2:
|
||||
|
@ -693,7 +696,7 @@ class ItemDiff:
|
|||
pd = self._presence_diff('link')
|
||||
if pd is not None:
|
||||
return pd
|
||||
if 'source' in self._item1 and 'source' in self._item2 and self._item1.source != self._item2.source:
|
||||
if 'target' in self._item1 and 'target' in self._item2 and self._item1.target != self._item2.target:
|
||||
return ({"type": 'changed link'}, 'changed link')
|
||||
|
||||
def _content_diff(self):
|
||||
|
|
|
@ -149,7 +149,7 @@ class ChunkBufferTestCase(BaseTestCase):
|
|||
|
||||
def test_partial(self):
|
||||
big = "0123456789abcdefghijklmnopqrstuvwxyz" * 25000
|
||||
data = [Item(path="full", source=big), Item(path="partial", source=big)]
|
||||
data = [Item(path="full", target=big), Item(path="partial", target=big)]
|
||||
cache = MockCache()
|
||||
key = PlaintextKey(None)
|
||||
chunks = CacheChunkBuffer(cache, key, None)
|
||||
|
|
|
@ -153,6 +153,12 @@ class ArchiverTestCase(ArchiverTestCaseBase):
|
|||
# Note: healthy == True indicates the *absence* of the additional chunks_healthy list
|
||||
del g["hlid"]
|
||||
|
||||
# borg 1 used "linktarget" and "source" for links, borg 2 uses "target" for symlinks.
|
||||
if g["target"] == e["linktarget"]:
|
||||
e["target"] = e["linktarget"]
|
||||
del e["linktarget"]
|
||||
del e["source"]
|
||||
|
||||
if e["type"] == "b" and is_win32:
|
||||
# The S_IFBLK macro is broken on MINGW
|
||||
del e["type"], g["type"]
|
||||
|
@ -197,6 +203,7 @@ class ArchiverTestCase(ArchiverTestCaseBase):
|
|||
chunks1 = item.chunks
|
||||
size1 = item.size
|
||||
assert "source" not in item
|
||||
assert "target" not in item
|
||||
assert "hardlink_master" not in item
|
||||
elif item.path.endswith("hardlink2"):
|
||||
assert stat.S_ISREG(item.mode)
|
||||
|
@ -207,15 +214,16 @@ class ArchiverTestCase(ArchiverTestCaseBase):
|
|||
chunks2 = item.chunks
|
||||
size2 = item.size
|
||||
assert "source" not in item
|
||||
assert "target" not in item
|
||||
assert "hardlink_master" not in item
|
||||
elif item.path.endswith("broken_symlink"):
|
||||
assert stat.S_ISLNK(item.mode)
|
||||
assert item.source == "doesnotexist"
|
||||
assert item.target == "doesnotexist"
|
||||
assert item.uid > 0
|
||||
assert "hlid" not in item
|
||||
elif item.path.endswith("symlink"):
|
||||
assert stat.S_ISLNK(item.mode)
|
||||
assert item.source == "target"
|
||||
assert item.target == "target"
|
||||
assert item.uid > 0
|
||||
assert "hlid" not in item
|
||||
elif item.path.endswith("fifo"):
|
||||
|
|
|
@ -52,7 +52,6 @@ class UpgraderFrom12To20:
|
|||
"""upgrade item as needed, get rid of legacy crap"""
|
||||
ITEM_KEY_WHITELIST = {
|
||||
"path",
|
||||
"source",
|
||||
"rdev",
|
||||
"chunks",
|
||||
"chunks_healthy",
|
||||
|
@ -92,7 +91,12 @@ class UpgraderFrom12To20:
|
|||
# make sure we only have desired stuff in the new item. specifically, make sure to get rid of:
|
||||
# - 'acl' remnants of bug in attic <= 0.13
|
||||
# - 'hardlink_master' (superseded by hlid)
|
||||
new_item_dict = {key: value for key, value in item.as_dict().items() if key in ITEM_KEY_WHITELIST}
|
||||
item_dict = item.as_dict()
|
||||
new_item_dict = {key: value for key, value in item_dict.items() if key in ITEM_KEY_WHITELIST}
|
||||
# symlink targets were .source for borg1, but borg2 uses .target:
|
||||
if "source" in item_dict:
|
||||
new_item_dict["target"] = item_dict["source"]
|
||||
assert "source" not in new_item_dict
|
||||
# remove some pointless entries older borg put in there:
|
||||
for key in "user", "group":
|
||||
if key in new_item_dict and new_item_dict[key] is None:
|
||||
|
|
Loading…
Reference in New Issue