Mirror of https://github.com/borgbackup/borg.git (synced 2025-02-24 15:12:00 +00:00)
PR #224 - Merge branch 'issue_55' into merge
Commit 2c2dd21e0e
2 changed files with 41 additions and 8 deletions
@@ -369,21 +369,33 @@ def stat_attrs(self, st, path):
         acl_get(path, item, st, self.numeric_owner)
         return item
 
-    def process_item(self, path, st):
+    def process_dir(self, path, st):
         item = {b'path': make_path_safe(path)}
         item.update(self.stat_attrs(st, path))
         self.add_item(item)
+        return 'd'  # directory
+
+    def process_fifo(self, path, st):
+        item = {b'path': make_path_safe(path)}
+        item.update(self.stat_attrs(st, path))
+        self.add_item(item)
+        return 'f'  # fifo
 
     def process_dev(self, path, st):
         item = {b'path': make_path_safe(path), b'rdev': st.st_rdev}
         item.update(self.stat_attrs(st, path))
         self.add_item(item)
+        if stat.S_ISCHR(st.st_mode):
+            return 'c'  # char device
+        elif stat.S_ISBLK(st.st_mode):
+            return 'b'  # block device
 
     def process_symlink(self, path, st):
         source = os.readlink(path)
         item = {b'path': make_path_safe(path), b'source': source}
         item.update(self.stat_attrs(st, path))
         self.add_item(item)
+        return 's'  # symlink
 
     def process_stdin(self, path, cache):
         uid, gid = 0, 0
@@ -403,6 +415,7 @@ def process_stdin(self, path, cache):
         self.add_item(item)
 
     def process_file(self, path, st, cache):
+        status = None
         safe_path = make_path_safe(path)
         # Is it a hard link?
         if st.st_nlink > 1:
@@ -411,7 +424,8 @@ def process_file(self, path, st, cache):
                 item = self.stat_attrs(st, path)
                 item.update({b'path': safe_path, b'source': source})
                 self.add_item(item)
-                return
+                status = 'h'  # regular file, hardlink (to already seen inodes)
+                return status
             else:
                 self.hard_links[st.st_ino, st.st_dev] = safe_path
         path_hash = self.key.id_hash(os.path.join(self.cwd, path).encode('utf-8', 'surrogateescape'))
@@ -424,6 +438,9 @@ def process_file(self, path, st, cache):
                     break
             else:
                 chunks = [cache.chunk_incref(id_, self.stats) for id_ in ids]
+                status = 'U'  # regular file, unchanged
+        else:
+            status = 'A'  # regular file, added
         # Only chunkify the file if needed
         if chunks is None:
             with Archive._open_rb(path, st) as fd:
@@ -431,10 +448,12 @@ def process_file(self, path, st, cache):
                 for chunk in self.chunker.chunkify(fd):
                     chunks.append(cache.add_chunk(self.key.id_hash(chunk), chunk, self.stats))
             cache.memorize_file(path_hash, st, [c[0] for c in chunks])
+            status = status or 'M'  # regular file, modified (if not 'A' already)
         item = {b'path': safe_path, b'chunks': chunks}
         item.update(self.stat_attrs(st, path))
         self.stats.nfiles += 1
         self.add_item(item)
+        return status
 
     @staticmethod
     def list_archives(repository, key, manifest, cache=None):
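Note (not part of the diff): for quick reference, the one-character status codes introduced by the process_* methods above can be collected into a small legend. This is only a sketch assembled from the return values and comments in this PR, not code from the repository.

# Sketch only: legend for the status characters returned above and
# interpreted by the archiver hunk further down.
STATUS_LEGEND = {
    'A': 'regular file, added',
    'M': 'regular file, modified',
    'U': 'regular file, unchanged',
    'h': 'regular file, hardlink to already seen inode',
    'd': 'directory',
    'f': 'fifo',
    's': 'symlink',
    'c': 'char device',
    'b': 'block device',
    '?': 'no status code assigned',
}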
@@ -165,16 +165,16 @@ def _process(self, archive, cache, excludes, exclude_caches, skip_inodes, path,
         # Ignore unix sockets
         if stat.S_ISSOCK(st.st_mode):
             return
-        self.print_verbose(remove_surrogates(path))
+        status = None
         if stat.S_ISREG(st.st_mode):
             try:
-                archive.process_file(path, st, cache)
+                status = archive.process_file(path, st, cache)
             except IOError as e:
                 self.print_error('%s: %s', path, e)
         elif stat.S_ISDIR(st.st_mode):
             if exclude_caches and is_cachedir(path):
                 return
-            archive.process_item(path, st)
+            status = archive.process_dir(path, st)
             try:
                 entries = os.listdir(path)
             except OSError as e:
@@ -185,13 +185,27 @@ def _process(self, archive, cache, excludes, exclude_caches, skip_inodes, path,
                     self._process(archive, cache, excludes, exclude_caches, skip_inodes,
                                   entry_path, restrict_dev)
         elif stat.S_ISLNK(st.st_mode):
-            archive.process_symlink(path, st)
+            status = archive.process_symlink(path, st)
         elif stat.S_ISFIFO(st.st_mode):
-            archive.process_item(path, st)
+            status = archive.process_fifo(path, st)
         elif stat.S_ISCHR(st.st_mode) or stat.S_ISBLK(st.st_mode):
-            archive.process_dev(path, st)
+            status = archive.process_dev(path, st)
         else:
             self.print_error('Unknown file type: %s', path)
             return
+        # Status output
+        # A lowercase character means a file type other than a regular file,
+        # attic usually just stores them. E.g. (d)irectory.
+        # Hardlinks to already seen content are indicated by (h).
+        # A uppercase character means a regular file that was (A)dded,
+        # (M)odified or was (U)nchanged.
+        # Note: A/M/U is relative to the "files" cache, not to the repo.
+        # This would be an issue if the files cache is not used.
+        if status is None:
+            status = '?'  # need to add a status code somewhere
+        # output ALL the stuff - it can be easily filtered using grep.
+        # even stuff considered unchanged might be interesting.
+        self.print_verbose("%1s %s", status, remove_surrogates(path))
 
     def do_extract(self, args):
         """Extract archive contents"""
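Usage note (not part of the diff): with this change a verbose run prints one status character followed by the path, via print_verbose("%1s %s", status, remove_surrogates(path)). A minimal sketch of that formatting, approximating print_verbose with a plain print() and using hypothetical paths:

# Sketch only; print_verbose is the Archiver's helper, approximated here
# with print(), and the paths are made up for illustration.
for status, path in [('A', '/home/user/new.txt'),
                     ('U', '/home/user/unchanged.txt'),
                     ('d', '/home/user')]:
    print('%1s %s' % (status, path))
# prints:
# A /home/user/new.txt
# U /home/user/unchanged.txt
# d /home/user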