mirror of https://github.com/restic/restic.git
lock: test stale lock refresh
parent 2dd6769429
commit 1f6883a05c
@@ -159,6 +159,7 @@ func refreshLocks(ctx context.Context, backend restic.Backend, lockInfo *lockCon
 			return
 
 		case req := <-forceRefresh:
+			debug.Log("trying to refresh stale lock")
 			// keep on going if our current lock still exists
 			success := tryRefreshStaleLock(ctx, backend, lock, lockInfo.cancel)
 			// inform refresh goroutine about forced refresh
@@ -231,6 +232,7 @@ func monitorLockRefresh(ctx context.Context, lockInfo *lockContext, refreshed <-
 				continue
 			}
 
+			debug.Log("trying to refreshStaleLock")
 			// keep on going if our current lock still exists
 			refreshReq := refreshLockRequest{
 				result: make(chan bool),
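Taken together, the two hunks above sit on either side of a request/response handshake: monitorLockRefresh builds a refreshLockRequest carrying a result channel and sends it over the forceRefresh channel, and refreshLocks answers on that channel after calling tryRefreshStaleLock. The standalone sketch below illustrates that channel pattern in isolation; the names request, refresher, and monitor are invented for the example, and none of this code is part of the commit.

package main

import (
	"context"
	"fmt"
	"time"
)

// request mirrors the role of refreshLockRequest: the sender includes a
// channel on which it expects a single boolean answer.
type request struct {
	result chan bool
}

// refresher stands in for the refresh goroutine: it waits for forced
// refresh requests and reports whether the refresh attempt succeeded.
func refresher(ctx context.Context, force <-chan request) {
	for {
		select {
		case <-ctx.Done():
			return
		case req := <-force:
			success := true // placeholder for the actual refresh attempt
			// inform the requester, but never block past cancellation
			select {
			case <-ctx.Done():
			case req.result <- success:
			}
		}
	}
}

// monitor stands in for the monitoring goroutine: when it considers the
// lock stale, it sends a request and waits for the outcome.
func monitor(ctx context.Context, force chan<- request) bool {
	req := request{result: make(chan bool)}
	select {
	case <-ctx.Done():
		return false
	case force <- req:
	}
	select {
	case <-ctx.Done():
		return false
	case ok := <-req.result:
		return ok
	}
}

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()

	force := make(chan request)
	go refresher(ctx, force)
	fmt.Println("forced refresh succeeded:", monitor(ctx, force))
}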
@@ -5,11 +5,13 @@ import (
 	"fmt"
 	"runtime"
 	"strings"
+	"sync"
 	"testing"
 	"time"
 
 	"github.com/restic/restic/internal/backend/location"
 	"github.com/restic/restic/internal/backend/mem"
+	"github.com/restic/restic/internal/debug"
 	"github.com/restic/restic/internal/repository"
 	"github.com/restic/restic/internal/restic"
 	"github.com/restic/restic/internal/test"
@@ -190,6 +192,69 @@ func TestLockSuccessfulRefresh(t *testing.T) {
 	unlockRepo(lock)
 }
 
+type slowBackend struct {
+	restic.Backend
+	m     sync.Mutex
+	sleep time.Duration
+}
+
+func (b *slowBackend) Save(ctx context.Context, h restic.Handle, rd restic.RewindReader) error {
+	b.m.Lock()
+	sleep := b.sleep
+	b.m.Unlock()
+	time.Sleep(sleep)
+	return b.Backend.Save(ctx, h, rd)
+}
+
+func TestLockSuccessfulStaleRefresh(t *testing.T) {
+	var sb *slowBackend
+	repo, cleanup, env := openLockTestRepo(t, func(r restic.Backend) (restic.Backend, error) {
+		sb = &slowBackend{Backend: r}
+		return sb, nil
+	})
+	defer cleanup()
+
+	t.Logf("test for successful lock refresh %v", time.Now())
+	// reduce locking intervals to be suitable for testing
+	ri, rt := refreshInterval, refreshabilityTimeout
+	refreshInterval = 10 * time.Millisecond
+	refreshabilityTimeout = 50 * time.Millisecond
+	defer func() {
+		refreshInterval, refreshabilityTimeout = ri, rt
+	}()
+
+	lock, wrappedCtx := checkedLockRepo(context.Background(), t, repo, env)
+	// delay lock refreshing long enough that the lock would expire
+	sb.m.Lock()
+	sb.sleep = refreshabilityTimeout + refreshInterval
+	sb.m.Unlock()
+
+	select {
+	case <-wrappedCtx.Done():
+		// don't call t.Fatal to allow the lock to be properly cleaned up
+		t.Error("lock refresh failed", time.Now())
+
+	case <-time.After(refreshabilityTimeout):
+	}
+	// reset slow backend
+	sb.m.Lock()
+	sb.sleep = 0
+	sb.m.Unlock()
+	debug.Log("normal lock period has expired")
+
+	select {
+	case <-wrappedCtx.Done():
+		// don't call t.Fatal to allow the lock to be properly cleaned up
+		t.Error("lock refresh failed", time.Now())
+
+	case <-time.After(3 * refreshabilityTimeout):
+		// expected lock refresh to work
+	}
+
+	// unlockRepo should not crash
+	unlockRepo(lock)
+}
+
 func TestLockWaitTimeout(t *testing.T) {
 	repo, cleanup, env := openLockTestRepo(t, nil)
 	defer cleanup()
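The slowBackend helper added above relies on a standard Go testing technique: embed the wrapped interface value so every method is forwarded automatically, override only Save to inject a delay, and guard the delay with a mutex so the test can change it while requests are in flight. Below is a minimal, self-contained sketch of the same wrapper pattern; the Saver interface and the realSaver and slowSaver types are hypothetical stand-ins (for restic.Backend and slowBackend) and are not part of this commit.

package main

import (
	"fmt"
	"sync"
	"time"
)

// Saver is a stand-in for the interface being wrapped; only the method we
// want to slow down is declared.
type Saver interface {
	Save(name string) error
}

type realSaver struct{}

func (realSaver) Save(name string) error {
	fmt.Println("saved", name)
	return nil
}

// slowSaver embeds the real implementation and delays Save by a configurable
// amount; the mutex lets a test adjust the delay while calls are in flight.
type slowSaver struct {
	Saver
	mu    sync.Mutex
	sleep time.Duration
}

func (s *slowSaver) Save(name string) error {
	s.mu.Lock()
	sleep := s.sleep
	s.mu.Unlock()
	time.Sleep(sleep)
	return s.Saver.Save(name)
}

func main() {
	s := &slowSaver{Saver: realSaver{}}

	s.mu.Lock()
	s.sleep = 50 * time.Millisecond // simulate a slow backend
	s.mu.Unlock()

	start := time.Now()
	_ = s.Save("locks/1234")
	fmt.Println("call took", time.Since(start))
}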