mirror of https://github.com/borgbackup/borg.git (synced 2025-02-22 22:22:27 +00:00)
remove cpu-intensive compression methods for the chunks.archive

also remove the comment about how well xz compresses - while that was true for smaller index files, it seems to be less effective with bigger ones, maybe just an issue with the compression dictionary size.
This commit is contained in:
parent 17c4394896
commit f7210c749f

1 changed file with 4 additions and 5 deletions
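The remark about dictionary size in the commit message is a hypothesis, not something the diff tests. One way to check it would be to compress a real chunks index blob with zlib (roughly what 'gz' does) and with xz at several presets, since higher xz presets use larger LZMA dictionaries, and compare output sizes and wall-clock times. A rough sketch, not part of this commit; loading the index blob is left to the caller:

import lzma
import time
import zlib

def compare_compressors(index_bytes):
    # compare zlib level 6 (roughly what 'gz' does) against xz presets;
    # higher presets use larger LZMA dictionaries (preset 9: 64 MiB)
    results = {}
    t0 = time.perf_counter()
    gz_out = zlib.compress(index_bytes, 6)
    results['zlib-6'] = (len(gz_out), time.perf_counter() - t0)
    for preset in (1, 6, 9):
        t0 = time.perf_counter()
        xz_out = lzma.compress(index_bytes, preset=preset)
        results['xz-%d' % preset] = (len(xz_out), time.perf_counter() - t0)
    return results  # {name: (compressed_size_in_bytes, seconds)}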
@@ -213,9 +213,6 @@ def sync(self):
         so it has complete and current information about all backup archives.
         Finally, it builds the master chunks index by merging all indices from
         the tar.
-
-        Note: compression (esp. xz) is very effective in keeping the tar
-        relatively small compared to the files it contains.
         """
         in_archive_path = os.path.join(self.path, 'chunks.archive')
         out_archive_path = os.path.join(self.path, 'chunks.archive.tmp')
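For context, the docstring above says sync() builds the master chunks index by merging all per-archive indices stored in the chunks.archive tar. A minimal sketch of that merge step, with a hypothetical read_index() loader and a plain dict standing in for the real index type (the actual cache code keeps per-chunk reference counts rather than simply overwriting entries):

import tarfile

def merge_indices_from_tar(in_archive_path, read_index):
    # walk every member of chunks.archive and fold its entries into one
    # in-memory master index; read_index() is a hypothetical loader that
    # turns a file object into {chunk_id: entry}
    master_index = {}
    with tarfile.open(in_archive_path, 'r') as tf:
        for member in tf.getmembers():
            fileobj = tf.extractfile(member)
            if fileobj is None:  # skip directories and other special members
                continue
            for chunk_id, entry in read_index(fileobj).items():
                master_index[chunk_id] = entry  # the real code merges refcounts
    return master_index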
@@ -234,8 +231,10 @@ def open_in_archive():
             return tf

         def open_out_archive():
-            for compression in ('xz', 'bz2', 'gz'):
-                # xz needs py 3.3, bz2 and gz also work on 3.2
+            for compression in ('gz', ):
+                # 'xz' needs py 3.3 and is expensive on the cpu
+                # 'bz2' also works on 3.2 and is expensive on the cpu
+                # 'gz' also works on 3.2 and is less expensive on the cpu
                 try:
                     tf = tarfile.open(out_archive_path, 'w:'+compression, format=tarfile.PAX_FORMAT)
                     break
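The loop shown above is a fallback pattern: try each compression mode in turn and move on when the interpreter lacks the needed codec, which tarfile signals with CompressionError (for example, 'w:xz' raises it on builds without the lzma module). After this commit only 'gz' is tried, so the loop is mostly vestigial. A self-contained sketch of the pattern; the final uncompressed fallback is an assumption for illustration and is not shown in the hunk:

import tarfile

def open_out_archive(out_archive_path):
    # try the cheapest acceptable compression first; skip any codec whose
    # module is missing from this Python build
    for compression in ('gz',):  # after this commit, gzip only
        try:
            tf = tarfile.open(out_archive_path, 'w:' + compression,
                              format=tarfile.PAX_FORMAT)
            break
        except tarfile.CompressionError:
            continue
    else:
        # assumed fallback: write the tar uncompressed if no codec worked
        tf = tarfile.open(out_archive_path, 'w', format=tarfile.PAX_FORMAT)
    return tf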