Mirror of https://github.com/borgbackup/borg.git
commit f3088a9893 (parent 92f221075a)

    rename chunk_to_id_data to cached_hash

2 changed files with 4 additions and 4 deletions
src/borg/archive.py

@@ -19,7 +19,7 @@
 logger = create_logger()
 
 from . import xattr
-from .chunker import get_chunker, max_chunk_size, Chunk, chunk_to_id_data, zeros
+from .chunker import get_chunker, max_chunk_size, Chunk, cached_hash, zeros
 from .cache import ChunkListEntry
 from .crypto.key import key_factory
 from .compress import Compressor, CompressionSpec
@@ -1133,7 +1133,7 @@ def maybe_checkpoint(self, item, from_chunk, part_number, forced=False):
     def process_file_chunks(self, item, cache, stats, show_progress, chunk_iter, chunk_processor=None):
         if not chunk_processor:
             def chunk_processor(chunk):
-                chunk_id, data = chunk_to_id_data(chunk, self.key.id_hash)
+                chunk_id, data = cached_hash(chunk, self.key.id_hash)
                 chunk_entry = cache.add_chunk(chunk_id, data, stats, wait=False)
                 self.cache.repository.async_response(wait=False)
                 return chunk_entry
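For orientation, a rough usage sketch of the renamed helper as the default chunk_processor above consumes it: it returns the (chunk_id, data) pair that cache.add_chunk() needs. The Chunk constructor, the CH_DATA constant's location, and the sha256 stand-in for self.key.id_hash are assumptions about borg's internals, not part of this diff:

    from hashlib import sha256

    from borg.chunker import Chunk, cached_hash
    from borg.constants import CH_DATA

    def id_hash(data):  # toy stand-in for self.key.id_hash
        return sha256(data).digest()

    # Chunk(data, **meta) is assumed to attach the meta dict to the payload.
    chunk = Chunk(b'file payload', size=12, allocation=CH_DATA)
    chunk_id, data = cached_hash(chunk, id_hash)
    # chunk_id keys the chunk in the cache/repository; data is what gets stored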
@@ -1983,7 +1983,7 @@ def process_chunks(self, archive, target, item):
         target.process_file_chunks(item, self.cache, target.stats, self.progress, chunk_iterator, chunk_processor)
 
     def chunk_processor(self, target, chunk):
-        chunk_id, data = chunk_to_id_data(chunk, self.key.id_hash)
+        chunk_id, data = cached_hash(chunk, self.key.id_hash)
         if chunk_id in self.seen_chunks:
             return self.cache.chunk_incref(chunk_id, target.stats)
         overwrite = self.recompress
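The recreate path above differs from fresh archiving in one respect: a chunk id already seen during this run is only reference-counted, not stored again. A minimal sketch of that fast path, with seen_chunks, cache, and stats as hypothetical stand-ins for the attributes used above:

    def process_chunk(chunk, id_hash, seen_chunks, cache, stats):
        chunk_id, data = cached_hash(chunk, id_hash)
        if chunk_id in seen_chunks:
            # already stored during this run: bump the refcount, skip re-store
            return cache.chunk_incref(chunk_id, stats)
        seen_chunks.add(chunk_id)
        return cache.add_chunk(chunk_id, data, stats)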
src/borg/chunker.pyx

@@ -61,7 +61,7 @@ zeros = bytes(MAX_DATA_SIZE)
 # have different hash_funcs within the same borg run.
 zero_chunk_ids = LRUCache(10, dispose=lambda _: None)
 
-def chunk_to_id_data(chunk, id_hash):
+def cached_hash(chunk, id_hash):
     allocation = chunk.meta['allocation']
     if allocation == CH_DATA:
         data = chunk.data
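The hunk is cut off after the CH_DATA branch, so the point of the new name is easy to miss: data chunks are simply hashed, while all-zero chunks (sparse holes and preallocated ranges) get their ids from the small zero_chunk_ids LRU cache keyed by (hash function, size) shown in the context lines. A minimal sketch of the full function under that reading; the CH_HOLE/CH_ALLOC branch is reconstructed from context, not verbatim source:

    def cached_hash(chunk, id_hash):
        allocation = chunk.meta['allocation']
        if allocation == CH_DATA:
            # real file data: hash it, nothing cacheable here
            data = chunk.data
            chunk_id = id_hash(data)
        elif allocation in (CH_HOLE, CH_ALLOC):
            # all-zero chunk: the id depends only on (id_hash, size), so it
            # can be served from the zero_chunk_ids LRU cache instead of
            # rehashing a zero buffer every time
            size = chunk.meta['size']
            data = memoryview(zeros)[:size]
            try:
                chunk_id = zero_chunk_ids[(id_hash, size)]
            except KeyError:
                chunk_id = id_hash(data)
                zero_chunk_ids[(id_hash, size)] = chunk_id
        else:
            raise ValueError('unexpected allocation type')
        return chunk_id, data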