Revert "Reduce memory usage when backing up many small files"

The memory usage should be reduced a fair bit by #90, so this
might not be needed anymore, or at least not enabled by default.
Jonas Borgström 2014-06-03 21:39:26 +02:00
parent 7f9fe03917
commit 80a2a4113f
2 changed files with 6 additions and 12 deletions

@@ -10,7 +10,6 @@ Version 0.13
 - Reduce file cache memory usage (#90)
 - Faster AES encryption (utilizing AES-NI when available)
-- Reduced memory usage when backing up many small files (#69)
 - Experimental Linux, OS X and FreeBSD ACL support (#66)
 - Added support for backup and restore of BSDFlags (OSX, FreeBSD) (#56)
 - Fix bug where xattrs on symlinks were not correctly restored

@@ -12,9 +12,6 @@ from .hashindex import ChunkIndex
 class Cache(object):
     """Client Side cache
     """
-    # Do not cache file metadata for files smaller than this
-    FILE_MIN_SIZE = 4096
-
     class RepositoryReplay(Error):
         """Cache is newer than repository, refusing to continue"""
@@ -84,9 +81,8 @@ class Cache(object):
                     break
                 u.feed(data)
                 for path_hash, item in u:
-                    if item[2] > self.FILE_MIN_SIZE:
-                        item[0] += 1
-                        self.files[path_hash] = msgpack.packb(item)
+                    item[0] += 1
+                    self.files[path_hash] = msgpack.packb(item)
 
     def begin_txn(self):
         # Initialize transaction snapshot
@@ -223,8 +219,7 @@ class Cache(object):
         return None
 
     def memorize_file(self, path_hash, st, ids):
-        if st.st_size > self.FILE_MIN_SIZE:
-            # Entry: Age, inode, size, mtime, chunk ids
-            mtime_ns = st_mtime_ns(st)
-            self.files[path_hash] = msgpack.packb((0, st.st_ino, st.st_size, mtime_ns, ids))
-            self._newest_mtime = max(self._newest_mtime, mtime_ns)
+        # Entry: Age, inode, size, mtime, chunk ids
+        mtime_ns = st_mtime_ns(st)
+        self.files[path_hash] = msgpack.packb((0, st.st_ino, st.st_size, mtime_ns, ids))
+        self._newest_mtime = max(self._newest_mtime, mtime_ns)
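For context on the restored code path: memorize_file packs one entry of (age, inode, size, mtime, chunk ids) per path hash, and on a later backup run that entry lets an unchanged file be skipped without re-reading and re-chunking it. Below is a minimal sketch of that idea, not attic's actual API: memorize, known_and_unchanged, files_cache and the plain SHA-256 path_hash helper are illustrative names for this example, and the unchanged check is simplified (attic keys the cache by a keyed hash of the full path and uses a st_mtime_ns() helper instead of st.st_mtime_ns).

    import hashlib
    import os

    import msgpack

    # Illustrative stand-in for Cache.files:
    # path_hash -> packed (age, inode, size, mtime_ns, chunk_ids)
    files_cache = {}

    def path_hash(path):
        # Hypothetical helper for this sketch; a plain SHA-256 of the path
        # is enough to illustrate the lookup key.
        return hashlib.sha256(path.encode()).digest()

    def memorize(path, st, chunk_ids):
        # Age starts at 0 and is incremented each time the cache is loaded
        # (the item[0] += 1 line in the hunk above), so entries for files
        # that are no longer seen can eventually be expired.
        entry = (0, st.st_ino, st.st_size, st.st_mtime_ns, chunk_ids)
        files_cache[path_hash(path)] = msgpack.packb(entry)

    def known_and_unchanged(path, st):
        # Return the cached chunk ids if inode, size and mtime still match,
        # otherwise None, meaning the file has to be re-read and re-chunked.
        packed = files_cache.get(path_hash(path))
        if packed is None:
            return None
        age, inode, size, mtime_ns, chunk_ids = msgpack.unpackb(packed)
        if (inode, size, mtime_ns) == (st.st_ino, st.st_size, st.st_mtime_ns):
            return chunk_ids
        return None

    # Usage: remember a file on one run, recognise it as unchanged on the next.
    st = os.stat(__file__)
    memorize(__file__, st, [b'chunk-id-1', b'chunk-id-2'])
    print(known_and_unchanged(__file__, os.stat(__file__)))  # cached chunk ids

The reverted FILE_MIN_SIZE guard simply skipped the memorize step for files below 4096 bytes, trading cache hits on small files for a smaller files cache; with the per-entry memory reduction from #90 that trade-off is no longer considered worthwhile by default.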