Add more checks for canceled contexts

Author: Michael Eischer
Date:   2024-07-31 19:30:47 +02:00
Parent: 8d5e188218
Commit: ae1cb889dd

17 changed files with 87 additions and 3 deletions
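
Every hunk below applies the same idea: a loop that may run for a long time now polls its context at the top of each iteration and returns early once that context has been canceled. A minimal, self-contained sketch of the pattern (illustrative only, not restic code; processAll and the item slice are made-up placeholders):

package main

import (
	"context"
	"fmt"
)

// processAll mirrors the checks added in this commit: each loop iteration
// starts with a cheap ctx.Err() test so that a canceled context stops the
// work promptly instead of only after the whole loop has finished.
func processAll(ctx context.Context, items []string) error {
	for _, item := range items {
		if ctx.Err() != nil {
			// context was canceled or its deadline expired
			return ctx.Err()
		}
		fmt.Println("processing", item)
	}
	return nil
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	cancel() // cancel immediately; processAll should return right away
	if err := processAll(ctx, []string{"a", "b", "c"}); err != nil {
		fmt.Println("aborted:", err) // prints "aborted: context canceled"
	}
}

Polling ctx.Err() rather than selecting on ctx.Done() is the natural fit here, since none of these loops block on a channel; the check is cheap and never blocks.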


@@ -578,6 +578,10 @@ func (r *SFTP) deleteRecursive(ctx context.Context, name string) error {
 	}
 	for _, fi := range entries {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		itemName := r.Join(name, fi.Name())
 		if fi.IsDir() {
 			err := r.deleteRecursive(ctx, itemName)


@@ -107,6 +107,10 @@ func (d *dir) open(ctx context.Context) error {
 	}
 	items := make(map[string]*restic.Node)
 	for _, n := range tree.Nodes {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		nodes, err := replaceSpecialNodes(ctx, d.root.repo, n)
 		if err != nil {
 			debug.Log(" replaceSpecialNodes(%v) failed: %v", n, err)
@@ -171,6 +175,10 @@ func (d *dir) ReadDirAll(ctx context.Context) ([]fuse.Dirent, error) {
 	})
 	for _, node := range d.items {
+		if ctx.Err() != nil {
+			return nil, ctx.Err()
+		}
 		name := cleanupNodeName(node.Name)
 		var typ fuse.DirentType
 		switch node.Type {


@@ -66,12 +66,16 @@ func (f *file) Attr(_ context.Context, a *fuse.Attr) error {
 }
-func (f *file) Open(_ context.Context, _ *fuse.OpenRequest, _ *fuse.OpenResponse) (fs.Handle, error) {
+func (f *file) Open(ctx context.Context, _ *fuse.OpenRequest, _ *fuse.OpenResponse) (fs.Handle, error) {
 	debug.Log("open file %v with %d blobs", f.node.Name, len(f.node.Content))
 	var bytes uint64
 	cumsize := make([]uint64, 1+len(f.node.Content))
 	for i, id := range f.node.Content {
+		if ctx.Err() != nil {
+			return nil, ctx.Err()
+		}
 		size, found := f.root.repo.LookupBlobSize(restic.DataBlob, id)
 		if !found {
 			return nil, errors.Errorf("id %v not found in repository", id)


@@ -78,6 +78,10 @@ func (d *SnapshotsDir) ReadDirAll(ctx context.Context) ([]fuse.Dirent, error) {
 	}
 	for name, entry := range meta.names {
+		if ctx.Err() != nil {
+			return nil, ctx.Err()
+		}
 		d := fuse.Dirent{
 			Inode: inodeFromName(d.inode, name),
 			Name:  name,


@@ -95,6 +95,10 @@ func checkPackInner(ctx context.Context, r *Repository, id restic.ID, blobs []re
 	it := newPackBlobIterator(id, newBufReader(bufRd), 0, blobs, r.Key(), dec)
 	for {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		val, err := it.Next()
 		if err == errPackEOF {
 			break


@@ -1000,6 +1000,10 @@ func streamPackPart(ctx context.Context, beLoad backendLoadFn, loadBlobFn loadBl
 	it := newPackBlobIterator(packID, newByteReader(data), dataStart, blobs, key, dec)
 	for {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		val, err := it.Next()
 		if err == errPackEOF {
 			break


@@ -134,6 +134,10 @@ func (f *SnapshotFilter) FindAll(ctx context.Context, be Lister, loader LoaderUn
 	ids := NewIDSet()
 	// Process all snapshot IDs given as arguments.
 	for _, s := range snapshotIDs {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		var sn *Snapshot
 		if s == "latest" {
 			if usedFilter {


@@ -122,6 +122,10 @@ func (r *fileRestorer) restoreFiles(ctx context.Context) error {
 	// create packInfo from fileInfo
 	for _, file := range r.files {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		fileBlobs := file.blobs.(restic.IDs)
 		largeFile := len(fileBlobs) > largeFileBlobCount
 		var packsMap map[restic.ID][]fileBlobInfo


@@ -450,7 +450,7 @@ func (res *Restorer) RestoreTo(ctx context.Context, dst string) error {
 		},
 		leaveDir: func(node *restic.Node, target, location string, expectedFilenames []string) error {
 			if res.opts.Delete {
-				if err := res.removeUnexpectedFiles(target, location, expectedFilenames); err != nil {
+				if err := res.removeUnexpectedFiles(ctx, target, location, expectedFilenames); err != nil {
 					return err
 				}
 			}
@@ -469,7 +469,7 @@ func (res *Restorer) RestoreTo(ctx context.Context, dst string) error {
 	return err
 }
-func (res *Restorer) removeUnexpectedFiles(target, location string, expectedFilenames []string) error {
+func (res *Restorer) removeUnexpectedFiles(ctx context.Context, target, location string, expectedFilenames []string) error {
 	if !res.opts.Delete {
 		panic("internal error")
 	}
@@ -487,6 +487,10 @@ func (res *Restorer) removeUnexpectedFiles(target, location string, expectedFile
 	}
 	for _, entry := range entries {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		if _, ok := keep[toComparableFilename(entry)]; ok {
 			continue
 		}
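
The restorer hunks above differ slightly from the rest: removeUnexpectedFiles did not previously take a context, so the caller's ctx is first threaded through the function signature and only then checked inside the per-entry loop. A rough sketch of that shape, with hypothetical names rather than restic's actual API:

package sketch

import "context"

// cleanupDir sketches the new shape of removeUnexpectedFiles: the caller's
// ctx is passed in explicitly, so canceling the restore also stops the
// deletion pass over entries that are not part of the snapshot.
func cleanupDir(ctx context.Context, entries []string, keep map[string]bool) error {
	for _, entry := range entries {
		if ctx.Err() != nil {
			return ctx.Err() // abort between directory entries
		}
		if keep[entry] {
			continue // entry belongs to the snapshot, leave it alone
		}
		// a real implementation would remove the entry from disk here
	}
	return nil
}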


@@ -116,6 +116,10 @@ func (t *TreeRewriter) RewriteTree(ctx context.Context, repo BlobLoadSaver, node
 	tb := restic.NewTreeJSONBuilder()
 	for _, node := range curTree.Nodes {
+		if ctx.Err() != nil {
+			return restic.ID{}, ctx.Err()
+		}
 		path := path.Join(nodepath, node.Name)
 		node = t.opts.RewriteNode(node, path)
 		if node == nil {


@@ -57,6 +57,10 @@ func walk(ctx context.Context, repo restic.BlobLoader, prefix string, parentTree
 	})
 	for _, node := range tree.Nodes {
+		if ctx.Err() != nil {
+			return ctx.Err()
+		}
 		p := path.Join(prefix, node.Name)
 		if node.Type == "" {