package index_test

import (
	"bytes"
	"context"
	"fmt"
	"math/rand"
	"sync"
	"testing"

	"github.com/restic/restic/internal/index"
	"github.com/restic/restic/internal/restic"
	rtest "github.com/restic/restic/internal/test"
)

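// TestIndexSerialize builds an index from random packs, encodes and decodes it,
// and checks that every stored blob can be looked up in both copies, before and
// after the index is finalized.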
func TestIndexSerialize(t *testing.T) {
	tests := []restic.PackedBlob{}

	idx := index.NewIndex()

	// create 50 packs with 20 blobs each
	for i := 0; i < 50; i++ {
		packID := restic.NewRandomID()

		var blobs []restic.Blob

		pos := uint(0)
		for j := 0; j < 20; j++ {
			length := uint(i*100 + j)
			uncompressedLength := uint(0)
			if i >= 25 {
				// test a mix of compressed and uncompressed packs
				uncompressedLength = 2 * length
			}
			pb := restic.PackedBlob{
				Blob: restic.Blob{
					BlobHandle:         restic.NewRandomBlobHandle(),
					Offset:             pos,
					Length:             length,
					UncompressedLength: uncompressedLength,
				},
				PackID: packID,
			}
			blobs = append(blobs, pb.Blob)
			tests = append(tests, pb)
			pos += length
		}
		idx.StorePack(packID, blobs)
	}

	wr := bytes.NewBuffer(nil)
	err := idx.Encode(wr)
	rtest.OK(t, err)

	idx2ID := restic.NewRandomID()
	idx2, oldFormat, err := index.DecodeIndex(wr.Bytes(), idx2ID)
	rtest.OK(t, err)
	rtest.Assert(t, idx2 != nil,
		"nil returned for decoded index")
	rtest.Assert(t, !oldFormat, "new index format recognized as old format")

	indexID, err := idx2.IDs()
	rtest.OK(t, err)
	rtest.Equals(t, indexID, restic.IDs{idx2ID})

	wr2 := bytes.NewBuffer(nil)
	err = idx2.Encode(wr2)
	rtest.OK(t, err)

	for _, testBlob := range tests {
		list := idx.Lookup(testBlob.BlobHandle, nil)
		if len(list) != 1 {
			t.Errorf("expected one result for blob %v, got %v: %v", testBlob.ID.Str(), len(list), list)
		}
		result := list[0]

		rtest.Equals(t, testBlob, result)

		list2 := idx2.Lookup(testBlob.BlobHandle, nil)
		if len(list2) != 1 {
			t.Errorf("expected one result for blob %v, got %v: %v", testBlob.ID.Str(), len(list2), list2)
		}
		result2 := list2[0]

		rtest.Equals(t, testBlob, result2)
	}

	// add more blobs to idx
	newtests := []restic.PackedBlob{}
	for i := 0; i < 10; i++ {
		packID := restic.NewRandomID()

		var blobs []restic.Blob

		pos := uint(0)
		for j := 0; j < 10; j++ {
			length := uint(i*100 + j)
			pb := restic.PackedBlob{
				Blob: restic.Blob{
					BlobHandle: restic.NewRandomBlobHandle(),
					Offset:     pos,
					Length:     length,
				},
				PackID: packID,
			}
			blobs = append(blobs, pb.Blob)
			newtests = append(newtests, pb)
			pos += length
		}
		idx.StorePack(packID, blobs)
	}

	// finalize; serialize idx, unserialize to idx3
	idx.Finalize()
	wr3 := bytes.NewBuffer(nil)
	err = idx.Encode(wr3)
	rtest.OK(t, err)

	rtest.Assert(t, idx.Final(),
		"index not final after encoding")

	id := restic.NewRandomID()
	rtest.OK(t, idx.SetID(id))

	ids, err := idx.IDs()
	rtest.OK(t, err)
	rtest.Equals(t, restic.IDs{id}, ids)

	idx3, oldFormat, err := index.DecodeIndex(wr3.Bytes(), id)
	rtest.OK(t, err)
	rtest.Assert(t, idx3 != nil,
		"nil returned for decoded index")
	rtest.Assert(t, idx3.Final(),
		"decoded index is not final")
	rtest.Assert(t, !oldFormat, "new index format recognized as old format")

	// all new blobs must be in the index
	for _, testBlob := range newtests {
		list := idx3.Lookup(testBlob.BlobHandle, nil)
		if len(list) != 1 {
			t.Errorf("expected one result for blob %v, got %v: %v", testBlob.ID.Str(), len(list), list)
		}
		blob := list[0]

		rtest.Equals(t, testBlob, blob)
	}
}

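// TestIndexSize encodes an index with a fixed number of packs and blobs and
// logs the size of the resulting index file.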
func TestIndexSize(t *testing.T) {
	idx := index.NewIndex()

	packs := 200
	blobCount := 100
	for i := 0; i < packs; i++ {
		packID := restic.NewRandomID()

		var blobs []restic.Blob

		pos := uint(0)
		for j := 0; j < blobCount; j++ {
			length := uint(i*100 + j)
			blobs = append(blobs, restic.Blob{
				BlobHandle: restic.NewRandomBlobHandle(),
				Offset:     pos,
				Length:     length,
			})
			pos += length
		}
		idx.StorePack(packID, blobs)
	}

	wr := bytes.NewBuffer(nil)
	err := idx.Encode(wr)
	rtest.OK(t, err)

	t.Logf("Index file size for %d blobs in %d packs is %d", blobCount*packs, packs, wr.Len())
}

// example index serialization from doc/Design.rst
var docExampleV1 = []byte(`
{
  "supersedes": [
    "ed54ae36197f4745ebc4b54d10e0f623eaaaedd03013eb7ae90df881b7781452"
  ],
  "packs": [
    {
      "id": "73d04e6125cf3c28a299cc2f3cca3b78ceac396e4fcf9575e34536b26782413c",
      "blobs": [
        {
          "id": "3ec79977ef0cf5de7b08cd12b874cd0f62bbaf7f07f3497a5b1bbcc8cb39b1ce",
          "type": "data",
          "offset": 0,
          "length": 38
        }, {
          "id": "9ccb846e60d90d4eb915848add7aa7ea1e4bbabfc60e573db9f7bfb2789afbae",
          "type": "tree",
          "offset": 38,
          "length": 112
        },
        {
          "id": "d3dc577b4ffd38cc4b32122cabf8655a0223ed22edfd93b353dc0c3f2b0fdf66",
          "type": "data",
          "offset": 150,
          "length": 123
        }
      ]
    }
  ]
}
`)

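// docExampleV2 is the same example in the version 2 index format, which adds
// the optional "uncompressed_length" field for compressed blobs.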
var docExampleV2 = []byte(`
{
  "supersedes": [
    "ed54ae36197f4745ebc4b54d10e0f623eaaaedd03013eb7ae90df881b7781452"
  ],
  "packs": [
    {
      "id": "73d04e6125cf3c28a299cc2f3cca3b78ceac396e4fcf9575e34536b26782413c",
      "blobs": [
        {
          "id": "3ec79977ef0cf5de7b08cd12b874cd0f62bbaf7f07f3497a5b1bbcc8cb39b1ce",
          "type": "data",
          "offset": 0,
          "length": 38
        },
        {
          "id": "9ccb846e60d90d4eb915848add7aa7ea1e4bbabfc60e573db9f7bfb2789afbae",
          "type": "tree",
          "offset": 38,
          "length": 112,
          "uncompressed_length": 511
        },
        {
          "id": "d3dc577b4ffd38cc4b32122cabf8655a0223ed22edfd93b353dc0c3f2b0fdf66",
          "type": "data",
          "offset": 150,
          "length": 123,
          "uncompressed_length": 234
        }
      ]
    }
  ]
}
`)

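// docOldExample uses the legacy index format: a bare JSON array of packs
// without the surrounding object and "supersedes" list.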
var docOldExample = []byte(`
[ {
  "id": "73d04e6125cf3c28a299cc2f3cca3b78ceac396e4fcf9575e34536b26782413c",
  "blobs": [
    {
      "id": "3ec79977ef0cf5de7b08cd12b874cd0f62bbaf7f07f3497a5b1bbcc8cb39b1ce",
      "type": "data",
      "offset": 0,
      "length": 38
    }, {
      "id": "9ccb846e60d90d4eb915848add7aa7ea1e4bbabfc60e573db9f7bfb2789afbae",
      "type": "tree",
      "offset": 38,
      "length": 112
    },
    {
      "id": "d3dc577b4ffd38cc4b32122cabf8655a0223ed22edfd93b353dc0c3f2b0fdf66",
      "type": "data",
      "offset": 150,
      "length": 123
    }
  ]
} ]
`)

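// exampleTests lists the blobs from the example indexes above together with
// the values expected after decoding.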
var exampleTests = []struct {
	id, packID         restic.ID
	tpe                restic.BlobType
	offset, length     uint
	uncompressedLength uint
}{
	{
		restic.TestParseID("3ec79977ef0cf5de7b08cd12b874cd0f62bbaf7f07f3497a5b1bbcc8cb39b1ce"),
		restic.TestParseID("73d04e6125cf3c28a299cc2f3cca3b78ceac396e4fcf9575e34536b26782413c"),
		restic.DataBlob, 0, 38, 0,
	}, {
		restic.TestParseID("9ccb846e60d90d4eb915848add7aa7ea1e4bbabfc60e573db9f7bfb2789afbae"),
		restic.TestParseID("73d04e6125cf3c28a299cc2f3cca3b78ceac396e4fcf9575e34536b26782413c"),
		restic.TreeBlob, 38, 112, 511,
	}, {
		restic.TestParseID("d3dc577b4ffd38cc4b32122cabf8655a0223ed22edfd93b353dc0c3f2b0fdf66"),
		restic.TestParseID("73d04e6125cf3c28a299cc2f3cca3b78ceac396e4fcf9575e34536b26782413c"),
		restic.DataBlob, 150, 123, 234,
	},
}

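// exampleLookupTest describes the pack from the example index and the blobs it
// is expected to contain.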
var exampleLookupTest = struct {
	packID restic.ID
	blobs  map[restic.ID]restic.BlobType
}{
	restic.TestParseID("73d04e6125cf3c28a299cc2f3cca3b78ceac396e4fcf9575e34536b26782413c"),
	map[restic.ID]restic.BlobType{
		restic.TestParseID("3ec79977ef0cf5de7b08cd12b874cd0f62bbaf7f07f3497a5b1bbcc8cb39b1ce"): restic.DataBlob,
		restic.TestParseID("9ccb846e60d90d4eb915848add7aa7ea1e4bbabfc60e573db9f7bfb2789afbae"): restic.TreeBlob,
		restic.TestParseID("d3dc577b4ffd38cc4b32122cabf8655a0223ed22edfd93b353dc0c3f2b0fdf66"): restic.DataBlob,
	},
}

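// TestIndexUnserialize decodes the v1 and v2 example indexes and checks that
// lookups, the supersedes list and the per-pack blob list match the expected
// values.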
func TestIndexUnserialize(t *testing.T) {
	for _, task := range []struct {
		idxBytes []byte
		version  int
	}{
		{docExampleV1, 1},
		{docExampleV2, 2},
	} {
		oldIdx := restic.IDs{restic.TestParseID("ed54ae36197f4745ebc4b54d10e0f623eaaaedd03013eb7ae90df881b7781452")}

		idx, oldFormat, err := index.DecodeIndex(task.idxBytes, restic.NewRandomID())
		rtest.OK(t, err)
		rtest.Assert(t, !oldFormat, "new index format recognized as old format")

		for _, test := range exampleTests {
			list := idx.Lookup(restic.BlobHandle{ID: test.id, Type: test.tpe}, nil)
			if len(list) != 1 {
				t.Errorf("expected one result for blob %v, got %v: %v", test.id.Str(), len(list), list)
			}
			blob := list[0]

			t.Logf("looking for blob %v/%v, got %v", test.tpe, test.id.Str(), blob)

			rtest.Equals(t, test.packID, blob.PackID)
			rtest.Equals(t, test.tpe, blob.Type)
			rtest.Equals(t, test.offset, blob.Offset)
			rtest.Equals(t, test.length, blob.Length)
			if task.version == 1 {
				rtest.Equals(t, uint(0), blob.UncompressedLength)
			} else if task.version == 2 {
				rtest.Equals(t, test.uncompressedLength, blob.UncompressedLength)
			} else {
				t.Fatal("Invalid index version")
			}
		}

		rtest.Equals(t, oldIdx, idx.Supersedes())

		blobs := listPack(idx, exampleLookupTest.packID)
		if len(blobs) != len(exampleLookupTest.blobs) {
			t.Fatalf("expected %d blobs in pack, got %d", len(exampleLookupTest.blobs), len(blobs))
		}

		for _, blob := range blobs {
			b, ok := exampleLookupTest.blobs[blob.ID]
			if !ok {
				t.Errorf("unexpected blob %v found", blob.ID.Str())
			}
			if blob.Type != b {
				t.Errorf("unexpected type for blob %v: want %v, got %v", blob.ID.Str(), b, blob.Type)
			}
		}
	}
}

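// listPack returns all blobs in idx that are stored in the pack with the given ID.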
func listPack(idx *index.Index, id restic.ID) (pbs []restic.PackedBlob) {
	idx.Each(context.TODO(), func(pb restic.PackedBlob) {
		if pb.PackID.Equal(id) {
			pbs = append(pbs, pb)
		}
	})
	return pbs
}

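// benchmarkIndexJSON caches a large encoded index so that the decode
// benchmarks do not pay the setup cost on every run.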
var (
	benchmarkIndexJSON     []byte
	benchmarkIndexJSONOnce sync.Once
)

func initBenchmarkIndexJSON() {
	idx, _ := createRandomIndex(rand.New(rand.NewSource(0)), 200000)
	var buf bytes.Buffer
	err := idx.Encode(&buf)
	if err != nil {
		panic(err)
	}

	benchmarkIndexJSON = buf.Bytes()
}

func BenchmarkDecodeIndex(b *testing.B) {
	benchmarkIndexJSONOnce.Do(initBenchmarkIndexJSON)

	id := restic.NewRandomID()
	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		_, _, err := index.DecodeIndex(benchmarkIndexJSON, id)
		rtest.OK(b, err)
	}
}

func BenchmarkDecodeIndexParallel(b *testing.B) {
	benchmarkIndexJSONOnce.Do(initBenchmarkIndexJSON)

	id := restic.NewRandomID()
	b.ResetTimer()

	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			_, _, err := index.DecodeIndex(benchmarkIndexJSON, id)
			rtest.OK(b, err)
		}
	})
}

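// BenchmarkEncodeIndex measures encoding of freshly built random indexes with
// 100, 1000 and 10000 packs.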
func BenchmarkEncodeIndex(b *testing.B) {
	for _, n := range []int{100, 1000, 10000} {
		idx, _ := createRandomIndex(rand.New(rand.NewSource(0)), n)

		b.Run(fmt.Sprint(n), func(b *testing.B) {
			buf := new(bytes.Buffer)
			err := idx.Encode(buf)
			rtest.OK(b, err)

			b.ResetTimer()
			b.ReportAllocs()
			for i := 0; i < b.N; i++ {
				buf.Reset()
				_ = idx.Encode(buf)
			}
		})
	}
}

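// TestIndexUnserializeOld decodes the legacy example index and checks that it
// is recognized as the old format.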
func TestIndexUnserializeOld(t *testing.T) {
	idx, oldFormat, err := index.DecodeIndex(docOldExample, restic.NewRandomID())
	rtest.OK(t, err)
	rtest.Assert(t, oldFormat, "old index format recognized as new format")

	for _, test := range exampleTests {
		list := idx.Lookup(restic.BlobHandle{ID: test.id, Type: test.tpe}, nil)
		if len(list) != 1 {
			t.Errorf("expected one result for blob %v, got %v: %v", test.id.Str(), len(list), list)
		}
		blob := list[0]

		rtest.Equals(t, test.packID, blob.PackID)
		rtest.Equals(t, test.tpe, blob.Type)
		rtest.Equals(t, test.offset, blob.Offset)
		rtest.Equals(t, test.length, blob.Length)
	}

	rtest.Equals(t, 0, len(idx.Supersedes()))
}

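// TestIndexPacks checks that Packs returns exactly the pack IDs stored in the index.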
func TestIndexPacks(t *testing.T) {
	idx := index.NewIndex()
	packs := restic.NewIDSet()

	for i := 0; i < 20; i++ {
		packID := restic.NewRandomID()
		idx.StorePack(packID, []restic.Blob{
			{
				BlobHandle: restic.NewRandomBlobHandle(),
				Offset:     0,
				Length:     23,
			},
		})

		packs.Insert(packID)
	}

	idxPacks := idx.Packs()
	rtest.Assert(t, packs.Equals(idxPacks), "packs in index do not match packs added to index")
}

const maxPackSize = 16 * 1024 * 1024

// NewRandomTestID generates an (insecure) random ID, similar to restic.NewRandomID.
func NewRandomTestID(rng *rand.Rand) restic.ID {
	id := restic.ID{}
	rng.Read(id[:])
	return id
}

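// createRandomIndex builds an index with the given number of pack files, each
// filled with random blob entries up to maxPackSize, and returns the handle of
// one stored blob for lookup benchmarks.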
func createRandomIndex(rng *rand.Rand, packfiles int) (idx *index.Index, lookupBh restic.BlobHandle) {
	idx = index.NewIndex()

	// create index with given number of pack files
	for i := 0; i < packfiles; i++ {
		packID := NewRandomTestID(rng)
		var blobs []restic.Blob
		offset := 0
		for offset < maxPackSize {
			size := 2000 + rng.Intn(4*1024*1024)
			id := NewRandomTestID(rng)
			blobs = append(blobs, restic.Blob{
				BlobHandle: restic.BlobHandle{
					Type: restic.DataBlob,
					ID:   id,
				},
				Length:             uint(size),
				UncompressedLength: uint(2 * size),
				Offset:             uint(offset),
			})

			offset += size
		}
		idx.StorePack(packID, blobs)

		if i == 0 {
			lookupBh = restic.BlobHandle{
				Type: restic.DataBlob,
				ID:   blobs[rng.Intn(len(blobs))].ID,
			}
		}
	}

	return idx, lookupBh
}

func BenchmarkIndexHasUnknown(b *testing.B) {
	idx, _ := createRandomIndex(rand.New(rand.NewSource(0)), 200000)
	lookupBh := restic.NewRandomBlobHandle()

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		idx.Has(lookupBh)
	}
}

func BenchmarkIndexHasKnown(b *testing.B) {
	idx, lookupBh := createRandomIndex(rand.New(rand.NewSource(0)), 200000)

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		idx.Has(lookupBh)
	}
}

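// BenchmarkIndexAlloc measures the allocations needed to build a large
// in-memory index from scratch.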
func BenchmarkIndexAlloc(b *testing.B) {
	rng := rand.New(rand.NewSource(0))
	b.ReportAllocs()

	for i := 0; i < b.N; i++ {
		createRandomIndex(rng, 200000)
	}
}

func BenchmarkIndexAllocParallel(b *testing.B) {
	b.ReportAllocs()

	b.RunParallel(func(pb *testing.PB) {
		rng := rand.New(rand.NewSource(0))
		for pb.Next() {
			createRandomIndex(rng, 200000)
		}
	})
}

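// TestIndexHas stores a mix of compressed and uncompressed blobs and checks
// that Has reports present handles and rejects unknown ones, including a known
// ID paired with the wrong blob type.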
func TestIndexHas(t *testing.T) {
	tests := []restic.PackedBlob{}

	idx := index.NewIndex()

	// create 50 packs with 20 blobs each
	for i := 0; i < 50; i++ {
		packID := restic.NewRandomID()

		var blobs []restic.Blob

		pos := uint(0)
		for j := 0; j < 20; j++ {
			length := uint(i*100 + j)
			uncompressedLength := uint(0)
			if i >= 25 {
				// test a mix of compressed and uncompressed packs
				uncompressedLength = 2 * length
			}
			pb := restic.PackedBlob{
				Blob: restic.Blob{
					BlobHandle:         restic.NewRandomBlobHandle(),
					Offset:             pos,
					Length:             length,
					UncompressedLength: uncompressedLength,
				},
				PackID: packID,
			}
			blobs = append(blobs, pb.Blob)
			tests = append(tests, pb)
			pos += length
		}
		idx.StorePack(packID, blobs)
	}

	for _, testBlob := range tests {
		rtest.Assert(t, idx.Has(testBlob.BlobHandle), "Index reports not having data blob added to it")
	}

	rtest.Assert(t, !idx.Has(restic.NewRandomBlobHandle()), "Index reports having a data blob not added to it")
	rtest.Assert(t, !idx.Has(restic.BlobHandle{ID: tests[0].ID, Type: restic.TreeBlob}), "Index reports having a tree blob added to it with the same id as a data blob")
}

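// TestMixedEachByPack checks that EachByPack reports the data and tree blob of
// a mixed pack together in a single entry.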
func TestMixedEachByPack(t *testing.T) {
	idx := index.NewIndex()

	expected := make(map[restic.ID]int)
	// create 50 packs with 2 blobs each
	for i := 0; i < 50; i++ {
		packID := restic.NewRandomID()
		expected[packID] = 1
		blobs := []restic.Blob{
			{
				BlobHandle: restic.BlobHandle{Type: restic.DataBlob, ID: restic.NewRandomID()},
				Offset:     0,
				Length:     42,
			},
			{
				BlobHandle: restic.BlobHandle{Type: restic.TreeBlob, ID: restic.NewRandomID()},
				Offset:     42,
				Length:     43,
			},
		}
		idx.StorePack(packID, blobs)
	}

	reported := make(map[restic.ID]int)
	for bp := range idx.EachByPack(context.TODO(), restic.NewIDSet()) {
		reported[bp.PackID]++

		rtest.Equals(t, 2, len(bp.Blobs)) // correct blob count
		if bp.Blobs[0].Offset > bp.Blobs[1].Offset {
			bp.Blobs[1], bp.Blobs[0] = bp.Blobs[0], bp.Blobs[1]
		}
		b0 := bp.Blobs[0]
		rtest.Assert(t, b0.Type == restic.DataBlob && b0.Offset == 0 && b0.Length == 42, "wrong blob", b0)
		b1 := bp.Blobs[1]
		rtest.Assert(t, b1.Type == restic.TreeBlob && b1.Offset == 42 && b1.Length == 43, "wrong blob", b1)
	}
	rtest.Equals(t, expected, reported)
}

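// TestEachByPackIgnores checks that packs listed in the ignore set are skipped
// by EachByPack.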
func TestEachByPackIgnores(t *testing.T) {
	idx := index.NewIndex()

	ignores := restic.NewIDSet()
	expected := make(map[restic.ID]int)
	// create 50 packs with one blob each
	for i := 0; i < 50; i++ {
		packID := restic.NewRandomID()
		if i < 3 {
			ignores.Insert(packID)
		} else {
			expected[packID] = 1
		}
		blobs := []restic.Blob{
			{
				BlobHandle: restic.BlobHandle{Type: restic.DataBlob, ID: restic.NewRandomID()},
				Offset:     0,
				Length:     42,
			},
		}
		idx.StorePack(packID, blobs)
	}
	idx.Finalize()

	reported := make(map[restic.ID]int)
	for bp := range idx.EachByPack(context.TODO(), ignores) {
		reported[bp.PackID]++

		rtest.Equals(t, 1, len(bp.Blobs)) // correct blob count
		b0 := bp.Blobs[0]
		rtest.Assert(t, b0.Type == restic.DataBlob && b0.Offset == 0 && b0.Length == 42, "wrong blob", b0)
	}
	rtest.Equals(t, expected, reported)
}