Revert "Reduce memory usage when backing up many small files"

The memory usage should be reduced a fair bit by #90, so this
might not be needed anymore — or at least not enabled by default.
This commit is contained in:
Jonas Borgström 2014-06-03 21:39:26 +02:00
parent 7f9fe03917
commit 80a2a4113f
2 changed files with 6 additions and 12 deletions

View File

@ -10,7 +10,6 @@ Version 0.13
- Reduce file cache memory usage (#90)
- Faster AES encryption (utilizing AES-NI when available)
- Reduced memory usage when backing up many small files (#69)
- Experimental Linux, OS X and FreeBSD ACL support (#66)
- Added support for backup and restore of BSDFlags (OSX, FreeBSD) (#56)
- Fix bug where xattrs on symlinks were not correctly restored

View File

@ -12,9 +12,6 @@ from .hashindex import ChunkIndex
class Cache(object):
"""Client Side cache
"""
# Do not cache file metadata for files smaller than this
FILE_MIN_SIZE = 4096
class RepositoryReplay(Error):
"""Cache is newer than repository, refusing to continue"""
@ -84,7 +81,6 @@ class Cache(object):
break
u.feed(data)
for path_hash, item in u:
if item[2] > self.FILE_MIN_SIZE:
item[0] += 1
self.files[path_hash] = msgpack.packb(item)
@ -223,7 +219,6 @@ class Cache(object):
return None
def memorize_file(self, path_hash, st, ids):
if st.st_size > self.FILE_MIN_SIZE:
# Entry: Age, inode, size, mtime, chunk ids
mtime_ns = st_mtime_ns(st)
self.files[path_hash] = msgpack.packb((0, st.st_ino, st.st_size, mtime_ns, ids))