This repository has been archived by the owner on Nov 1, 2022. It is now read-only.

Cleanup exported repos during sync failures
Several functions which generate a clone of the repo can result in the
repository being left behind when an error is triggered. This shores up
some of those failure paths to prevent the storage leaks.
nairb774 committed Sep 8, 2020
1 parent 755ad07 commit 1c55caf
Showing 4 changed files with 93 additions and 7 deletions.
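
Before the per-file diffs, here is a minimal sketch of the pattern the commit applies, in ordinary Go: the cleanup function is defined as soon as the exported directory exists, and every later failure path calls it before returning, so the directory cannot be left behind. The names exportRepo and prepare are hypothetical placeholders, not part of the flux codebase; only the overall shape matches the change below.

package main

import (
	"fmt"
	"io/ioutil"
	"os"
)

// exportRepo stands in for any operation that materialises a working copy on
// disk and may still fail in a later step (checkout, secret unsealing, ...).
// The "flux-working" prefix mirrors the one the tests below look for, but
// everything in this sketch is illustrative only.
func exportRepo(prepare func(dir string) error) (dir string, cleanup func(), err error) {
	dir, err = ioutil.TempDir("", "flux-working")
	if err != nil {
		return "", nil, err
	}

	// Define cleanup as soon as the directory exists ...
	cleanup = func() {
		if err := os.RemoveAll(dir); err != nil {
			fmt.Printf("cannot clean clone: %v\n", err)
		}
	}

	// ... so every later failure path can release it before returning.
	if err := prepare(dir); err != nil {
		cleanup()
		return "", nil, err
	}

	// Only on success does the caller take ownership of cleanup.
	return dir, cleanup, nil
}

func main() {
	_, _, err := exportRepo(func(string) error { return fmt.Errorf("checkout failed") })
	fmt.Println("export failed as expected:", err) // the temp dir has already been removed
}

The same shape appears twice in the diff: getManifestStoreByRevision now calls cleanupClone() when building the manifest store fails, and cloneRepo defines cleanup before unsealing secrets so it can run it when SecretUnseal fails.
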
19 changes: 12 additions & 7 deletions pkg/daemon/sync.go
@@ -154,7 +154,11 @@ func (d *Daemon) getManifestStoreByRevision(ctx context.Context, revision string
 	}

 	store, err = d.getManifestStore(clone)
-	return store, cleanupClone, err
+	if err != nil {
+		cleanupClone()
+		return nil, nil, err
+	}
+	return store, cleanupClone, nil
 }

 // cloneRepo makes a read-only clone of the given revision
@@ -166,21 +170,22 @@ func (d *Daemon) cloneRepo(ctx context.Context, revision string) (clone *git.Exp
 		return nil, nil, err
 	}

+	cleanup = func() {
+		if err := clone.Clean(); err != nil {
+			d.Logger.Log("error", fmt.Sprintf("cannot clean clone: %s", err))
+		}
+	}
+
 	// Unseal any secrets if enabled
 	if d.GitSecretEnabled {
 		ctxGitOp, cancel := context.WithTimeout(ctx, d.GitTimeout)
 		defer cancel()
 		if err := clone.SecretUnseal(ctxGitOp); err != nil {
+			cleanup()
 			return nil, nil, err
 		}
 	}

-	cleanup = func() {
-		if err := clone.Clean(); err != nil {
-			d.Logger.Log("error", fmt.Sprintf("cannot clean clone: %s", err))
-		}
-	}
-
 	return clone, cleanup, nil
 }

24 changes: 24 additions & 0 deletions pkg/daemon/sync_test.go
@@ -8,6 +8,7 @@ import (
 	"os"
 	"path"
 	"reflect"
+	"strings"
 	"sync"
 	"testing"
 	"time"
@@ -715,9 +716,32 @@ func TestDoSync_WithErrors(t *testing.T) {
 		t.Error(err)
 	}

+	findWorkingDirs := func() map[string]bool {
+		entries, err := ioutil.ReadDir(os.TempDir())
+		if err != nil {
+			t.Fatalf("ioutil.ReadDir(%q)=%v, want no error", os.TempDir(), err)
+		}
+
+		found := make(map[string]bool, len(entries))
+		for _, e := range entries {
+			// Makes an assumption about the location a repo export will happen.
+			if strings.HasPrefix(e.Name(), "flux-working") {
+				found[e.Name()] = true
+			}
+		}
+		return found
+	}
+
+	prior := findWorkingDirs()
+
 	if err := d.Sync(ctx, time.Now().UTC(), "HEAD", syncState); err != nil {
 		// Check error not nil, manifest counters remain the same
 		checkSyncManifestsMetrics(t, len(expectedResourceIDs), 0)
+		for d := range findWorkingDirs() {
+			if !prior[d] {
+				t.Errorf("Found %q in %q - failed to cleanup when returning an error", d, os.TempDir())
+			}
+		}
 	} else {
 		t.Error("Sync must fail because of invalid manifest")
 	}
1 change: 1 addition & 0 deletions pkg/git/export.go
@@ -28,6 +28,7 @@ func (r *Repo) Export(ctx context.Context, ref string) (*Export, error) {
 		return nil, err
 	}
 	if err = checkout(ctx, dir, ref); err != nil {
+		os.RemoveAll(dir)
 		return nil, err
 	}
 	return &Export{dir}, nil
56 changes: 56 additions & 0 deletions pkg/git/export_test.go
@@ -2,6 +2,9 @@ package git

 import (
 	"context"
+	"io/ioutil"
+	"os"
+	"strings"
 	"testing"
 	"time"

@@ -20,6 +23,7 @@ func TestExportAtRevision(t *testing.T) {
 		t.Fatal(err)
 	}
 	repo := NewRepo(Remote{URL: newDir}, ReadOnly)
+	defer repo.Clean()
 	if err := repo.Ready(ctx); err != nil {
 		t.Fatal(err)
 	}
@@ -33,6 +37,7 @@
 	if err != nil {
 		t.Fatal(err)
 	}
+	defer export.Clean()

 	exportHead, err := refRevision(ctx, export.dir, "HEAD")
 	if err != nil {
@@ -42,3 +47,54 @@
 		t.Errorf("exported %s, but head in export dir %s is %s", headMinusOne, export.dir, exportHead)
 	}
 }
+
+func TestExportFailsCheckout(t *testing.T) {
+	newDir, cleanup := testfiles.TempDir(t)
+	defer cleanup()
+
+	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+	defer cancel()
+
+	err := createRepo(newDir, []string{"config"})
+	if err != nil {
+		t.Fatal(err)
+	}
+	repo := NewRepo(Remote{URL: newDir}, ReadOnly)
+	defer repo.Clean()
+	if err := repo.Ready(ctx); err != nil {
+		t.Fatal(err)
+	}
+
+	findWorkingDirs := func() map[string]bool {
+		entries, err := ioutil.ReadDir(os.TempDir())
+		if err != nil {
+			t.Fatalf("ioutil.ReadDir(%q)=%v, want no error", os.TempDir(), err)
+		}
+
+		found := make(map[string]bool, len(entries))
+		for _, e := range entries {
+			// Makes an assumption about the location a repo export will happen.
+			if strings.HasPrefix(e.Name(), "flux-working") {
+				found[e.Name()] = true
+			}
+		}
+		return found
+	}
+
+	prior := findWorkingDirs()
+
+	// Try to check out a revision that does not exist:
+	export, err := repo.Export(ctx, "0000000000000000000000000000000000000000")
+	if err == nil {
+		t.Fatal("want repo.Export(\"0000000000000000000000000000000000000000\") to fail, succeeded instead")
+	}
+	if export != nil {
+		t.Fatalf("repo.Export(\"0000000000000000000000000000000000000000\")=%+v, want nil", export)
+	}
+
+	for d := range findWorkingDirs() {
+		if !prior[d] {
+			t.Errorf("Found %q in %q - failed to cleanup when returning an error", d, os.TempDir())
+		}
+	}
+}

