1
0
Fork 0
mirror of https://github.com/borgbackup/borg.git synced 2024-12-23 16:26:29 +00:00

borg mount: support exclusion group options and paths, fixes #2138

borg mount [options] repo_or_archive mountpoint path [paths...]

paths: you can just give some "root paths" (like for borg extract) to
only partially populate the FUSE filesystem.

Similarly, these exclusion group options are supported:
--exclude
--exclude-from
--pattern
--patterns-from
--strip-components
This commit is contained in:
Thomas Waldmann 2017-11-07 17:45:22 +01:00
parent 759c8f77cf
commit 77df1cfe8c
2 changed files with 18 additions and 3 deletions

View file

@@ -2325,6 +2325,9 @@ def define_archive_filters_group(subparser, *, sort_by=True, first_last=True):
subparser.add_argument('-o', dest='options', type=str,
help='Extra mount options')
define_archive_filters_group(subparser)
subparser.add_argument('paths', metavar='PATH', nargs='*', type=str,
help='paths to extract; patterns are supported')
define_exclusion_group(subparser, strip_components=True)
if parser.prog == 'borgfs':
return parser

View file

@@ -17,6 +17,7 @@
logger = create_logger()
from .crypto.low_level import blake2b_128
from .archiver import Archiver
from .archive import Archive
from .hashindex import FuseVersionsIndex
from .helpers import daemonize, hardlinkable, signal_handler, format_file_size
@@ -118,7 +119,7 @@ def get(self, inode):
else:
raise ValueError('Invalid entry type in self.meta')
def iter_archive_items(self, archive_item_ids):
def iter_archive_items(self, archive_item_ids, filter=None):
unpacker = msgpack.Unpacker()
# Current offset in the metadata stream, which consists of all metadata chunks glued together
@@ -161,6 +162,11 @@ def write_bytes(append_msgpacked_bytes):
# Need more data, feed the next chunk
break
item = Item(internal_dict=item)
if filter and not filter(item):
msgpacked_bytes = b''
continue
current_item = msgpacked_bytes
current_item_length = len(current_item)
current_spans_chunks = stream_offset - current_item_length < chunk_begin
@@ -197,7 +203,7 @@ def write_bytes(append_msgpacked_bytes):
inode = write_offset + self.offset
write_offset += 9
yield inode, Item(internal_dict=item)
yield inode, item
self.write_offset = write_offset
@@ -289,7 +295,13 @@ def _process_archive(self, archive_name, prefix=[]):
t0 = time.perf_counter()
archive = Archive(self.repository_uncached, self.key, self._manifest, archive_name,
consider_part_files=self._args.consider_part_files)
for item_inode, item in self.cache.iter_archive_items(archive.metadata.items):
strip_components = self._args.strip_components
matcher = Archiver.build_matcher(self._args.patterns, self._args.paths)
dummy = lambda x, y: None # TODO: add hardlink_master support code, see Archiver
filter = Archiver.build_filter(matcher, dummy, strip_components)
for item_inode, item in self.cache.iter_archive_items(archive.metadata.items, filter=filter):
if strip_components:
item.path = os.sep.join(item.path.split(os.sep)[strip_components:])
path = os.fsencode(item.path)
is_dir = stat.S_ISDIR(item.mode)
if is_dir: