More fixes: rename mod_to_str/pretty_size to format_file_mode/format_file_size, add format_time, fall back to chmod where lchmod is unavailable, and encrypt the local chunk cache.

Jonas Borgström 2010-10-27 19:30:21 +02:00
parent 5fd716bb92
commit e405eb4a42
4 changed files with 57 additions and 38 deletions

archive.py

@@ -9,10 +9,13 @@ import sys
from . import NS_ARCHIVE_METADATA, NS_ARCHIVE_ITEMS, NS_ARCHIVE_CHUNKS, NS_CHUNK
from .chunkifier import chunkify
from .helpers import uid2user, user2uid, gid2group, group2gid, IntegrityError, mod_to_str
from .helpers import uid2user, user2uid, gid2group, group2gid, \
IntegrityError, format_file_mode, format_time
CHUNK_SIZE = 55001
have_lchmod = hasattr(os, 'lchmod')
class Archive(object):
@@ -94,9 +97,9 @@ class Archive(object):
self.get_items()
for item in self.items:
type = tmap[item['type']]
mode = mod_to_str(item['mode'])
mode = format_file_mode(item['mode'])
size = item.get('size', 0)
mtime = datetime.fromtimestamp(item['mtime'])
mtime = format_time(datetime.fromtimestamp(item['mtime']))
print '%s%s %-6s %-6s %8d %s %s' % (type, mode, item['user'],
item['group'], size, mtime, item['path'])
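For a concrete feel of the new listing format, here is a standalone snippet that feeds the same format string with made-up values (the type character, names, size and path are illustrative, not taken from a real archive):

    ftype, mode = '-', 'rw-r--r--'            # '-' assumed for a regular file
    user, group, size = 'jonas', 'users', 1453
    mtime, path = 'Oct 27 19:30', 'home/jonas/notes.txt'
    print '%s%s %-6s %-6s %8d %s %s' % (ftype, mode, user, group, size, mtime, path)
    # -rw-r--r-- jonas  users      1453 Oct 27 19:30 home/jonas/notes.txt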
@@ -151,14 +154,14 @@ class Archive(object):
self.restore_stat(*dir_stat_queue.pop())
def restore_stat(self, path, item, symlink=False):
os.lchmod(path, item['mode'])
if have_lchmod:
os.lchmod(path, item['mode'])
elif not symlink:
os.chmod(path, item['mode'])
uid = user2uid(item['user']) or item['uid']
gid = group2gid(item['group']) or item['gid']
try:
if hasattr(os, 'lchown'): # Not available on Linux
os.lchown(path, uid, gid)
elif not symlink:
os.chown(path, uid, gid)
os.lchown(path, uid, gid)
except OSError:
pass
if not symlink:
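The permission half of this change is a small, self-contained pattern; a minimal sketch with a hypothetical helper name is shown below (the ownership half now simply attempts os.lchown and ignores OSError instead of checking hasattr first):

    import os

    have_lchmod = hasattr(os, 'lchmod')   # lchmod exists on BSD/macOS, not on Linux

    def restore_mode(path, mode, symlink=False):
        # prefer lchmod so a symlink's own mode is set without following it;
        # otherwise fall back to chmod, but never for symlinks, since chmod
        # would change the link target instead
        if have_lchmod:
            os.lchmod(path, mode)
        elif not symlink:
            os.chmod(path, mode)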
@@ -306,7 +309,7 @@ class Archive(object):
def process_chunk(self, id, data, cache):
idx = len(self.chunks)
size = cache.add_chunk(id, data, self.crypto)
size = cache.add_chunk(id, data)
self.chunks.append((id, size))
self.chunk_idx[id] = idx
return idx
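process_chunk maintains two structures per archive: a list of (id, size) pairs and a dict mapping a chunk id to its position in that list, so items can refer to chunks by index. A simplified, hypothetical sketch of that bookkeeping:

    chunks = []        # per-archive list of (chunk_id, stored_size)
    chunk_idx = {}     # chunk_id -> index into `chunks`

    def register_chunk(chunk_id, stored_size):
        # reuse the existing slot if this archive already references the chunk,
        # otherwise append it and remember its position
        if chunk_id in chunk_idx:
            return chunk_idx[chunk_id]
        idx = len(chunks)
        chunks.append((chunk_id, stored_size))
        chunk_idx[chunk_id] = idx
        return idx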

archiver.py

@@ -7,7 +7,7 @@ from .archive import Archive
from .store import Store
from .cache import Cache
from .crypto import CryptoManager, KeyChain
from .helpers import location_validator, pretty_size, LevelFilter
from .helpers import location_validator, format_file_size, LevelFilter
class Archiver(object):
@@ -81,9 +81,9 @@ class Archiver(object):
print 'Time:', archive.metadata['time']
print 'Command line:', ' '.join(archive.metadata['cmdline'])
print 'Number of Files:', len(archive.items)
print 'Original size:', pretty_size(osize)
print 'Compressed size:', pretty_size(csize)
print 'Unique data:', pretty_size(usize)
print 'Original size:', format_file_size(osize)
print 'Compressed size:', format_file_size(csize)
print 'Unique data:', format_file_size(usize)
return self.exit_code_from_logger()
def do_keychain_generate(self, args):
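The three figures differ because of compression and deduplication ('Unique data' presumably counts only chunks not already stored for other archives). With purely illustrative numbers and format_file_size from helpers:

    osize, csize, usize = 100 * 1024 * 1024, 70 * 1024 * 1024, 12 * 1024 * 1024
    print 'Original size:', format_file_size(osize)      # Original size: 100.00 MB
    print 'Compressed size:', format_file_size(csize)    # Compressed size: 70.00 MB
    print 'Unique data:', format_file_size(usize)        # Unique data: 12.00 MB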

cache.py

@@ -12,24 +12,26 @@ class Cache(object):
def __init__(self, store, crypto):
self.store = store
self.crypto = crypto
self.path = os.path.join(os.path.expanduser('~'), '.dedupestore', 'cache',
'%s.cache' % self.store.id.encode('hex'))
self.tid = -1
self.open()
if self.tid != self.store.tid:
self.init(crypto)
self.init()
def open(self):
if not os.path.exists(self.path):
return
cache = msgpack.unpackb(open(self.path, 'rb').read())
with open(self.path, 'rb') as fd:
data, hash = self.crypto.decrypt(fd.read())
cache = msgpack.unpackb(data)
assert cache['version'] == 1
self.chunk_counts = cache['chunk_counts']
# Discard old file_chunks entries
self.file_chunks = cache['file_chunks']
self.tid = cache['tid']
def init(self, crypto):
def init(self):
"""Initializes cache by fetching and reading all archive indicies
"""
logging.info('Initializing cache...')
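With this change the cache file on disk is ciphertext rather than raw msgpack. A minimal sketch of the read path, assuming (as the diff shows) a crypto object whose decrypt() returns a (plaintext, hash) pair:

    import msgpack

    def load_cache(path, crypto):
        # hypothetical standalone version of the new Cache.open() body
        with open(path, 'rb') as fd:
            data, hash = crypto.decrypt(fd.read())
        cache = msgpack.unpackb(data)
        assert cache['version'] == 1
        return cache['chunk_counts'], cache['file_chunks'], cache['tid']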
@@ -39,7 +41,7 @@ class Cache(object):
if self.store.tid == 0:
return
for id in list(self.store.list(NS_ARCHIVE_CHUNKS)):
data, hash = crypto.decrypt(self.store.get(NS_ARCHIVE_CHUNKS, id))
data, hash = self.crypto.decrypt(self.store.get(NS_ARCHIVE_CHUNKS, id))
cindex = msgpack.unpackb(data)
for id, size in cindex['chunks']:
try:
@@ -57,17 +59,17 @@ class Cache(object):
'file_chunks': dict(ifilter(lambda i: i[1][0] < 8,
self.file_chunks.iteritems())),
}
data = msgpack.packb(cache)
data, hash = self.crypto.encrypt_create(msgpack.packb(cache))
cachedir = os.path.dirname(self.path)
if not os.path.exists(cachedir):
os.makedirs(cachedir)
with open(self.path, 'wb') as fd:
fd.write(data)
def add_chunk(self, id, data, crypto):
def add_chunk(self, id, data):
if self.seen_chunk(id):
return self.chunk_incref(id)
data, hash = crypto.encrypt_read(data)
data, hash = self.crypto.encrypt_read(data)
csize = len(data)
self.store.put(NS_CHUNK, id, data)
self.chunk_counts[id] = (1, csize)
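add_chunk no longer takes a crypto argument because the Cache now owns one. Stripped of encryption and the store round-trip, the reference-counting idea looks roughly like this (hypothetical standalone sketch; the real method also encrypts the chunk with encrypt_read and puts it in the store under NS_CHUNK, and process_chunk records the returned size):

    chunk_counts = {}              # chunk_id -> (refcount, stored size)

    def add_chunk(chunk_id, data):
        if chunk_id in chunk_counts:           # already stored: bump the refcount
            count, csize = chunk_counts[chunk_id]
            chunk_counts[chunk_id] = (count + 1, csize)
            return csize
        csize = len(data)                      # new chunk: record it once
        chunk_counts[chunk_id] = (1, csize)
        return csize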

helpers.py

@@ -1,16 +1,44 @@
import argparse
from datetime import datetime
import grp
import logging
import pwd
import stat
import re
def format_time(t):
"""Format datetime suitable for fixed length list output
"""
if (datetime.now() - t).days < 365:
return t.strftime('%b %d %H:%M')
else:
return t.strftime('%b %d %Y')
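# Usage sketch, assuming format_time above and a "now" in late 2010: entries
# from the last year keep the clock time, older ones show the year instead.
print format_time(datetime(2010, 10, 20, 19, 30))   # prints Oct 20 19:30
print format_time(datetime(2008, 3, 1, 12, 0))      # prints Mar 01 2008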
def format_file_mode(mod):
"""Format file mode bits for list output
"""
def x(v):
return ''.join(v & m and s or '-'
for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
return '%s%s%s' % (x(mod / 64), x(mod / 8), x(mod))
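# Usage sketch, assuming format_file_mode above; note that `mod / 64` relies on
# Python 2 integer division (Python 3 would need //).
print format_file_mode(0754)    # prints rwxr-xr--
print format_file_mode(0640)    # prints rw-r-----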
def format_file_size(v):
"""Format file size into a human friendly format
"""
if v > 1024 * 1024 * 1024:
return '%.2f GB' % (v / 1024. / 1024. / 1024.)
elif v > 1024 * 1024:
return '%.2f MB' % (v / 1024. / 1024.)
elif v > 1024:
return '%.2f kB' % (v / 1024.)
else:
return str(v)
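# Usage sketch, assuming format_file_size above (sizes are scaled by 1024):
print format_file_size(512)               # prints 512
print format_file_size(2048)              # prints 2.00 kB
print format_file_size(5 * 1024 * 1024)   # prints 5.00 MB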
class IntegrityError(Exception):
"""
"""
def memoize(function):
cache = {}
def decorated_function(*args):
@@ -50,6 +78,7 @@ def group2gid(group):
except KeyError:
return None
class LevelFilter(logging.Filter):
"""Filter that counts record levels
"""
@@ -110,18 +139,3 @@ def location_validator(archive=None):
return validator
def pretty_size(v):
if v > 1024 * 1024 * 1024:
return '%.2f GB' % (v / 1024. / 1024. / 1024.)
elif v > 1024 * 1024:
return '%.2f MB' % (v / 1024. / 1024.)
elif v > 1024:
return '%.2f kB' % (v / 1024.)
else:
return str(v)
def mod_to_str(mod):
def x(v):
return ''.join(v & m and s or '-' for m, s in ((4, 'r'), (2, 'w'), (1, 'x')))
return '%s%s%s' % (x(mod / 64), x(mod / 8), x(mod))