debug_cmds cleanup

This commit is contained in:
bigtedde 2023-07-13 19:33:56 -04:00
parent 225fdb0b72
commit 3a7ee07cf3
1 changed file with 17 additions and 11 deletions

View File

@@ -62,22 +62,30 @@ def test_debug_dump_repo_objs(archivers, request):
def test_debug_put_get_delete_obj(archivers, request):
    """Exercise the debug put-obj / get-obj / delete-obj round trip.

    Stores a file's content as a raw repository object, reads it back and
    verifies the bytes match, then deletes it twice (the second delete must
    report "not found") and finally rejects a malformed object id.
    """
    archiver = request.getfixturevalue(archivers)
    repo_location, input_path = archiver.repository_location, archiver.input_path
    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
    payload = b"some data"
    create_regular_file(input_path, "file", contents=payload)

    # The object id is the payload's id-hash, computed by borg itself.
    object_id = cmd(archiver, f"--repo={repo_location}", "debug", "id-hash", "input/file").strip()

    result = cmd(archiver, f"--repo={repo_location}", "debug", "put-obj", object_id, "input/file")
    assert object_id in result
    result = cmd(archiver, f"--repo={repo_location}", "debug", "get-obj", object_id, "output/file")
    assert object_id in result
    with open("output/file", "rb") as fd:
        assert fd.read() == payload

    # First delete succeeds; a repeated delete must report the object as gone.
    result = cmd(archiver, f"--repo={repo_location}", "debug", "delete-obj", object_id)
    assert "deleted" in result
    result = cmd(archiver, f"--repo={repo_location}", "debug", "delete-obj", object_id)
    assert "not found" in result
    # Malformed ids are rejected with a distinct message.
    result = cmd(archiver, f"--repo={repo_location}", "debug", "delete-obj", "invalid")
    assert "is invalid" in result
@@ -93,10 +101,8 @@ def test_debug_id_hash_format_put_get_parse_obj(archivers, request):
meta = json.dumps(meta_dict).encode()
create_regular_file(input_path, "plain.bin", contents=data)
create_regular_file(input_path, "meta.json", contents=meta)
output = cmd(archiver, f"--repo={repo_location}", "debug", "id-hash", "input/plain.bin")
id_hash = output.strip()
cmd(
archiver,
f"--repo={repo_location}",
@@ -108,7 +114,6 @@ def test_debug_id_hash_format_put_get_parse_obj(archivers, request):
"output/data.bin",
"--compression=zstd,2",
)
output = cmd(archiver, f"--repo={repo_location}", "debug", "put-obj", id_hash, "output/data.bin")
assert id_hash in output
@@ -125,7 +130,6 @@ def test_debug_id_hash_format_put_get_parse_obj(archivers, request):
"output/plain.bin",
"output/meta.json",
)
with open("output/plain.bin", "rb") as f:
data_read = f.read()
assert data == data_read
@@ -134,7 +138,6 @@ def test_debug_id_hash_format_put_get_parse_obj(archivers, request):
meta_read = json.load(f)
for key, value in meta_dict.items():
assert meta_read.get(key) == value
assert meta_read.get("size") == len(data_read)
c = Compressor(name="zstd", level=2)
@@ -147,12 +150,14 @@ def test_debug_id_hash_format_put_get_parse_obj(archivers, request):
def test_debug_dump_manifest(archivers, request):
archiver = request.getfixturevalue(archivers)
repo_location, input_path = archiver.repository_location, archiver.input_path
create_regular_file(input_path, "file1", size=1024 * 80)
cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
dump_file = archiver.output_path + "/dump"
output = cmd(archiver, f"--repo={repo_location}", "debug", "dump-manifest", dump_file)
assert output == ""
with open(dump_file) as f:
result = json.load(f)
assert "archives" in result
@@ -166,12 +171,14 @@ def test_debug_dump_manifest(archivers, request):
def test_debug_dump_archive(archivers, request):
archiver = request.getfixturevalue(archivers)
repo_location, input_path = archiver.repository_location, archiver.input_path
create_regular_file(input_path, "file1", size=1024 * 80)
cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)
cmd(archiver, f"--repo={repo_location}", "create", "test", "input")
dump_file = archiver.output_path + "/dump"
output = cmd(archiver, f"--repo={repo_location}", "debug", "dump-archive", "test", dump_file)
assert output == ""
with open(dump_file) as f:
result = json.load(f)
assert "_name" in result
@@ -183,21 +190,20 @@ def test_debug_dump_archive(archivers, request):
def test_debug_refcount_obj(archivers, request):
    """Check debug refcount-obj output for missing, present, and invalid object ids.

    Note: the rendered diff interleaved both the pre-cleanup multi-line /
    string-concatenation assertions and their f-string replacements, leaving
    duplicated contradictory statements; only the post-cleanup f-string forms
    are kept here, matching the commit's intent.
    """
    archiver = request.getfixturevalue(archivers)
    repo_location = archiver.repository_location
    cmd(archiver, f"--repo={repo_location}", "rcreate", RK_ENCRYPTION)

    # An all-zero id can never exist in a freshly created repository.
    output = cmd(archiver, f"--repo={repo_location}", "debug", "refcount-obj", "0" * 64).strip()
    info = "object 0000000000000000000000000000000000000000000000000000000000000000 not found [info from chunks cache]."
    assert output == info

    # A freshly created archive's id has exactly one referrer.
    create_json = json.loads(cmd(archiver, f"--repo={repo_location}", "create", "--json", "test", "input"))
    archive_id = create_json["archive"]["id"]
    output = cmd(archiver, f"--repo={repo_location}", "debug", "refcount-obj", archive_id).strip()
    assert output == f"object {archive_id} has 1 referrers [info from chunks cache]."

    # Invalid IDs do not abort or return an error
    output = cmd(archiver, f"--repo={repo_location}", "debug", "refcount-obj", "124", "xyza").strip()
    assert output == f"object id 124 is invalid.{os.linesep}object id xyza is invalid."
def test_debug_info(archivers, request):