Mirror of https://github.com/borgbase/vorta (synced 2025-01-02 21:25:48 +00:00)
Remove compression field. Don't store timezone in db.
* src/vorta/borg/create.py (BorgCreateJob.process_result): Remove timezone from snapshot time. Don't save compressed size.
* src/vorta/borg/info_repo.py (BorgInfoRepoJob.process_result): Remove compressed size.
* src/vorta/borg/list_repo.py (BorgListRepoJob.process_result): Remove timezone from archive time.
Parent: 2b2d61baa5
Commit: f9d1260316

3 changed files with 4 additions and 4 deletions
src/vorta/borg/create.py

@@ -15,7 +15,8 @@ def process_result(self, result):
             snapshot_id=result['data']['archive']['id'],
             defaults={
                 'name': result['data']['archive']['name'],
-                'time': dt.fromisoformat(result['data']['archive']['start']),
+                # SQLite can't save timezone, so we remove it here. TODO: Keep as UTC?
+                'time': dt.fromisoformat(result['data']['archive']['start']).replace(tzinfo=None),
                 'repo': result['params']['repo_id'],
                 'duration': result['data']['archive']['duration'],
                 'size': result['data']['archive']['stats']['deduplicated_size'],
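For reference, a minimal sketch of the timezone-stripping applied above. The sample timestamp is invented, but dt.fromisoformat() accepts offset-carrying ISO 8601 strings like the ones Borg's --json output can contain:

    from datetime import datetime as dt, timezone

    # Invented stand-in for result['data']['archive']['start'].
    start = '2025-01-02T21:25:48+00:00'

    aware = dt.fromisoformat(start)      # timezone-aware datetime
    naive = aware.replace(tzinfo=None)   # drop tzinfo: SQLite has no timezone type

    print(aware)   # 2025-01-02 21:25:48+00:00
    print(naive)   # 2025-01-02 21:25:48

    # The TODO above asks "Keep as UTC?": .replace() keeps the wall-clock value
    # as-is, while converting first would normalize to UTC before dropping tzinfo.
    utc_naive = aware.astimezone(timezone.utc).replace(tzinfo=None)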
@@ -26,7 +27,7 @@ def process_result(self, result):
         stats = result['data']['cache']['stats']
         repo = RepoModel.get(id=result['params']['repo_id'])
         repo.total_size = stats['total_size']
-        repo.unique_csize = stats['unique_csize']
+        # repo.unique_csize = stats['unique_csize']
         repo.unique_size = stats['unique_size']
         repo.total_unique_chunks = stats['total_unique_chunks']
         repo.save()
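As a hedged illustration of what the create job now persists, here is a self-contained run over a made-up result payload shaped like the keys accessed above (all values invented):

    # Invented payload mirroring the keys used in the hunk above.
    result = {
        'params': {'repo_id': 1},
        'data': {
            'cache': {
                'stats': {
                    'total_size': 10_000_000,
                    'unique_csize': 4_000_000,   # compressed size: now ignored
                    'unique_size': 6_000_000,
                    'total_unique_chunks': 1234,
                }
            }
        },
    }

    stats = result['data']['cache']['stats']
    # Only these three fields are still written to the repo row.
    kept = {k: stats[k] for k in ('total_size', 'unique_size', 'total_unique_chunks')}
    print(kept)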
src/vorta/borg/info_repo.py

@@ -57,7 +57,6 @@ def process_result(self, result):
         if 'cache' in result['data']:
             stats = result['data']['cache']['stats']
             new_repo.total_size = stats['total_size']
-            new_repo.unique_csize = stats['unique_csize']
             new_repo.unique_size = stats['unique_size']
             new_repo.total_unique_chunks = stats['total_unique_chunks']
         if 'encryption' in result['data']:
src/vorta/borg/list_repo.py

@@ -52,7 +52,7 @@ def process_result(self, result):
             repo=repo.id,
             defaults={
                 'name': archive['name'],
-                'time': dt.fromisoformat(archive['time']),
+                'time': dt.fromisoformat(archive['time']).replace(tzinfo=None),
             },
         )
         new_archive.save()
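The naive datetime produced above ends up in Peewee's get_or_create defaults. A minimal sketch of that pattern against an in-memory SQLite database follows; the model is illustrative, not Vorta's actual schema:

    from datetime import datetime as dt
    import peewee

    db = peewee.SqliteDatabase(':memory:')

    class ArchiveModel(peewee.Model):
        name = peewee.CharField()
        time = peewee.DateTimeField()

        class Meta:
            database = db

    db.create_tables([ArchiveModel])

    # Invented entry shaped like one item of Borg's --json archive list.
    archive = {'name': 'pc-2025-01-02', 'time': '2025-01-02T21:25:48+00:00'}

    new_archive, created = ArchiveModel.get_or_create(
        name=archive['name'],
        defaults={'time': dt.fromisoformat(archive['time']).replace(tzinfo=None)},
    )
    print(created, new_archive.time)  # True 2025-01-02 21:25:48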