More file closing issues (test) (#386)

* Refactored tu.TempDir

* More fiddling with file handling
Neil O'Toole 2024-01-29 19:34:10 -07:00 committed by GitHub
parent c4ef4b1c9f
commit fd070eefd3
10 changed files with 137 additions and 54 deletions

View File

@@ -39,7 +39,7 @@ func TestUpgrade(t *testing.T) {
 	testh.SetBuildVersion(t, nextVers)

 	// The sq.yml file in cfgDir is on v0.33.0
-	cfgDir := tu.DirCopy(t, "testdata", true)
+	cfgDir := tu.DirCopy(t, "testdata")
 	t.Setenv(config.EnvarConfig, cfgDir)
 	cfgFilePath := filepath.Join(cfgDir, "sq.yml")

View File

@@ -131,14 +131,14 @@ func newRun(ctx context.Context, tb testing.TB,
 	// the test runs may execute in parallel inside the same test binary
 	// process, thus breaking the pid-based lockfile mechanism.
 	if cacheDir == "" {
-		cacheDir = tu.CacheDir(tb, false)
+		cacheDir = tu.TempDir(tb, "cache")
 	}

 	ru.Files, err = files.New(
 		ctx,
 		ru.OptionsRegistry,
 		testh.TempLockFunc(tb),
-		tu.TempDir(tb, false),
+		tu.TempDir(tb, "temp"),
 		cacheDir,
 	)
 	require.NoError(tb, err)

View File

@@ -156,10 +156,21 @@ func TestQuerySQL_Count(t *testing.T) {
 func TestEmptyAsNull(t *testing.T) {
 	t.Parallel()

+	// We've added a bunch of tu.OpenFileCount calls to debug
+	// windows file close ordering issues. These should be
+	// removed when done.
+	t.Cleanup(func() {
+		tu.OpenFileCount(t, true, "earliest cleanup (last exec)") // FIXME: delete this line
+	})
+
 	th := testh.New(t)
+	tu.OpenFileCount(t, true, "top") // FIXME: delete this line
 	sink, err := th.QuerySLQ(sakila.CSVAddress+`| .data | .[0:1]`, nil)
 	require.NoError(t, err)
 	require.Equal(t, 1, len(sink.Recs))
+	tu.OpenFileCount(t, true, "after sink") // FIXME: delete this line
 	require.Equal(t, stringz.Strings(sakila.TblAddressColKinds()), stringz.Strings(sink.RecMeta.Kinds()))
@@ -181,6 +192,7 @@ func TestEmptyAsNull(t *testing.T) {
 	for i := range want {
 		require.EqualValues(t, want[i], rec0[i], "field [%d]", i)
 	}
+	tu.OpenFileCount(t, true, "bottom") // FIXME: delete this line
 }

 func TestIngest_DuplicateColumns(t *testing.T) {

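The tu.OpenFileCount calls added above are temporary instrumentation for chasing the Windows file-close ordering issues mentioned in the comment; the helper's implementation is not part of this diff. Purely as an illustrative sketch (not the actual tu.OpenFileCount, assuming the usual os and testing imports, and Linux-only), a checkpoint logger of this kind could look like:

// countOpenFDs is a hypothetical stand-in for tu.OpenFileCount. On Linux it
// counts the test process's open file descriptors by listing /proc/self/fd
// and logs the count with a label, which is enough to spot leaked file
// handles between checkpoints.
func countOpenFDs(tb testing.TB, label string) int {
	tb.Helper()
	entries, err := os.ReadDir("/proc/self/fd")
	if err != nil {
		tb.Logf("open files (%s): error: %v", label, err)
		return -1
	}
	n := len(entries) - 1 // subtract the fd opened by ReadDir itself
	tb.Logf("open files (%s): %d", label, n)
	return n
}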
View File

@@ -5,6 +5,10 @@ package lgt
 import (
 	"io"
 	"log/slog"
+	"os"
+	"testing"
+
+	"github.com/neilotoole/sq/libsq/core/lg/lga"

 	"github.com/neilotoole/slogt"
@@ -18,4 +22,6 @@ func init() { //nolint:gochecknoinits
 }

 // New delegates to slogt.New.
-var New = slogt.New
+func New(tb testing.TB) *slog.Logger { //nolint:thelper
+	return slogt.New(tb).With(lga.Pid, os.Getpid())
+}

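With this change, lgt.New is no longer a bare alias for slogt.New: every returned logger carries the process pid via the lga.Pid attribute, which makes it easier to tell apart interleaved output when several test processes log at once. A minimal usage sketch (the test name is hypothetical; lg.NewContext is used the same way in the test files touched by this commit):

package lgt_test // hypothetical example

import (
	"context"
	"testing"

	"github.com/neilotoole/sq/libsq/core/lg"
	"github.com/neilotoole/sq/libsq/core/lg/lgt"
)

func TestWithPidLogger(t *testing.T) {
	log := lgt.New(t) // every record now includes the pid attribute
	log.Debug("starting")

	// The logger is usually attached to a context for the code under test
	// to pick up.
	ctx := lg.NewContext(context.Background(), log)
	_ = ctx
}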
View File

@@ -428,13 +428,13 @@ func (fs *Files) doCacheClearAll(ctx context.Context) error {
 // REVISIT: This doesn't really do as much as desired. It should
 // also be able to detect orphaned src cache dirs and delete those.
 func (fs *Files) doCacheSweep() {
-	ctx, cancelFn := context.WithTimeout(context.Background(), time.Millisecond*100)
+	ctx, cancelFn := context.WithTimeout(context.Background(), time.Millisecond*500)
 	defer cancelFn()

 	log := fs.log.With(lga.Dir, fs.cacheDir)
 	if unlock, err := fs.cfgLockFn(ctx); err != nil {
-		log.Error("Sweep cache dir: failed to acquire config lock", lga.Lock, fs.cfgLockFn, lga.Err, err)
+		log.Warn("Sweep cache dir: failed to acquire config lock", lga.Lock, fs.cfgLockFn, lga.Err, err)
 		return
 	} else {
 		defer unlock()

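Two small behavioral tweaks in doCacheSweep: the context timeout is raised from 100ms to 500ms, and failure to acquire the config lock is now logged at warn rather than error level, since the sweep is opportunistic and can simply be skipped. The isolated pattern, as a sketch with hypothetical stand-ins (acquireLock for fs.cfgLockFn, sweep for the sweep body):

package example

import (
	"context"
	"log/slog"
	"time"
)

func sweepBestEffort(log *slog.Logger,
	acquireLock func(context.Context) (unlock func(), err error),
	sweep func(context.Context) error,
) {
	// Bound the whole operation; if the lock or the sweep is slow, give up.
	ctx, cancel := context.WithTimeout(context.Background(), 500*time.Millisecond)
	defer cancel()

	unlock, err := acquireLock(ctx)
	if err != nil {
		// Not fatal: a later invocation will sweep, so warn and return.
		log.Warn("sweep: failed to acquire config lock", "err", err)
		return
	}
	defer unlock()

	if err = sweep(ctx); err != nil {
		log.Warn("sweep failed", "err", err)
	}
}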
View File

@@ -55,7 +55,13 @@ func TestFiles_DetectType(t *testing.T) {
 		t.Run(filepath.Base(tc.loc), func(t *testing.T) {
 			ctx := lg.NewContext(context.Background(), lgt.New(t))
-			fs, err := files.New(ctx, nil, testh.TempLockFunc(t), tu.TempDir(t, true), tu.CacheDir(t, true))
+			fs, err := files.New(
+				ctx,
+				nil,
+				testh.TempLockFunc(t),
+				tu.TempDir(t, "temp"),
+				tu.TempDir(t, "cache"),
+			)
 			require.NoError(t, err)
 			fs.AddDriverDetectors(testh.DriverDetectors()...)
@@ -105,8 +111,8 @@ func TestFiles_DriverType(t *testing.T) {
 		ctx,
 		nil,
 		testh.TempLockFunc(t),
-		tu.TempDir(t, false),
-		tu.CacheDir(t, false),
+		tu.TempDir(t, "temp"),
+		tu.TempDir(t, "cache"),
 	)
 	require.NoError(t, err)
 	defer func() { assert.NoError(t, fs.Close()) }()
@@ -167,7 +173,13 @@ func TestFiles_NewReader(t *testing.T) {
 		Location: proj.Abs(fpath),
 	}
-	fs, err := files.New(ctx, nil, testh.TempLockFunc(t), tu.TempDir(t, true), tu.CacheDir(t, true))
+	fs, err := files.New(
+		ctx,
+		nil,
+		testh.TempLockFunc(t),
+		tu.TempDir(t, "tmp"),
+		tu.TempDir(t, "cache"),
+	)
 	require.NoError(t, err)
 	g := &errgroup.Group{}

View File

@@ -33,7 +33,7 @@ func TestDownloader(t *testing.T) {
 	log := lgt.New(t)
 	ctx := lg.NewContext(context.Background(), log)
-	cacheDir := t.TempDir()
+	cacheDir := tu.TempDir(t)
 	dl, gotErr := downloader.New(t.Name(), httpz.NewDefaultClient(), dlURL, cacheDir)
 	require.NoError(t, gotErr)
@@ -110,7 +110,7 @@ func TestDownloader_redirect(t *testing.T) {
 	const hello = `Hello World!`
 	serveBody := hello
 	lastModified := time.Now().UTC()
-	cacheDir := t.TempDir()
+	cacheDir := tu.TempDir(t)
 	log := lgt.New(t)
 	var srvr *httptest.Server
@@ -222,7 +222,7 @@ func TestCachePreservedOnFailedRefresh(t *testing.T) {
 	}))
 	t.Cleanup(srvr.Close)
-	cacheDir := filepath.Join(t.TempDir(), stringz.UniqSuffix("dlcache"))
+	cacheDir := filepath.Join(tu.TempDir(t), stringz.UniqSuffix("dlcache"))
 	dl, err := downloader.New(t.Name(), httpz.NewDefaultClient(), srvr.URL, cacheDir)
 	require.NoError(t, err)
 	require.NoError(t, dl.Clear(ctx))

View File

@@ -163,8 +163,8 @@ func (h *Helper) init() {
 		h.Context,
 		optRegistry,
 		TempLockFunc(h.T),
-		tu.TempDir(h.T, false),
-		tu.TempDir(h.T, false),
+		tu.TempDir(h.T, "temp"),
+		tu.TempDir(h.T, "cache"),
 	)
 	require.NoError(h.T, err)
@@ -320,7 +320,7 @@ func (h *Helper) Source(handle string) *source.Source {
 		srcPath, err := sqlite3.PathFromLocation(src)
 		require.NoError(t, err)
-		dstPath := filepath.Join(tu.TempDir(t, false), filepath.Base(srcPath))
+		dstPath := filepath.Join(tu.TempDir(t), filepath.Base(srcPath))
 		require.NoError(t, ioz.CopyFile(dstPath, srcPath, true))
 		src.Location = sqlite3.Prefix + dstPath
 	}
@@ -623,6 +623,7 @@ func (h *Helper) QuerySQL(src *source.Source, db sqlz.DB, query string, args ...
 // QuerySLQ executes the SLQ query. Args are predefined variables for
 // substitution.
 func (h *Helper) QuerySLQ(query string, args map[string]string) (*RecordSink, error) {
+	// h.init()
 	// We need to ensure that each of the handles in the query is loaded.
 	a, err := ast.Parse(h.Log(), query)
 	require.NoError(h.T, err)
@@ -897,7 +898,7 @@ func SetBuildVersion(tb testing.TB, vers string) {
 // TempLockfile returns a lockfile.Lockfile that uses a temp file.
 func TempLockfile(tb testing.TB) lockfile.Lockfile {
 	tb.Helper()
-	return lockfile.Lockfile(tu.TempFile(tb, "pid.lock", false))
+	return lockfile.Lockfile(tu.TempFile(tb, "pid.lock"))
 }

 // TempLockFunc returns a lockfile.LockFunc that uses a temp file.

View File

@@ -2,17 +2,24 @@
 package tu

 import (
+	"crypto/rand"
 	"fmt"
+	"hash/crc32"
 	"io"
 	"os"
 	"os/exec"
 	"path/filepath"
 	"reflect"
+	"strconv"
 	"strings"
+	"sync/atomic"
 	"testing"
+	"time"
 	"unicode"
 	"unicode/utf8"

+	"github.com/neilotoole/sq/testh/proj"
+
 	"github.com/otiai10/copy"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
@@ -237,18 +244,11 @@ func RequireTake[C any](tb testing.TB, c <-chan C, msgAndArgs ...any) {
 }

 // DirCopy copies the contents of sourceDir to a temp dir.
-// If keep is false, temp dir will be cleaned up on test exit.
-func DirCopy(tb testing.TB, sourceDir string, keep bool) (tmpDir string) {
+func DirCopy(tb testing.TB, sourceDir string) (tmpDir string) {
 	tb.Helper()
-	var err error
-	if keep {
-		tmpDir, err = os.MkdirTemp("", sanitizeTestName(tb.Name())+"_*")
-		require.NoError(tb, err)
-	} else {
-		tmpDir = tb.TempDir()
-	}
-	err = copy.Copy(sourceDir, tmpDir)
+	tmpDir = TempDir(tb)
+	err := copy.Copy(sourceDir, tmpDir)
 	require.NoError(tb, err)
 	tb.Logf("Copied %s -> %s", sourceDir, tmpDir)
 	return tmpDir
@@ -256,7 +256,7 @@ func DirCopy(tb testing.TB, sourceDir string, keep bool) (tmpDir string) {
 // sanitizeTestName sanitizes a test name. This impl is copied
 // from testing.T.TempDir.
-func sanitizeTestName(name string) string {
+func sanitizeTestName(name string) string { //nolint:unused
 	// Drop unusual characters (such as path separators or
 	// characters interacting with globs) from the directory name to
 	// avoid surprising os.MkdirTemp behavior.
@@ -319,7 +319,7 @@ func Chdir(tb testing.TB, dir string) (absDir string) {
 	}

 	if dir == "" {
-		tmpDir := tb.TempDir()
+		tmpDir := TempDir(tb)
 		tb.Cleanup(func() {
 			_ = os.Remove(tmpDir)
 		})
@@ -376,40 +376,69 @@ func MustStat(tb testing.TB, fp string) os.FileInfo {
 	return fi
 }

-// TempDir is the standard means for obtaining a temp dir for tests.
-// If arg clean is true, the temp dir is created via t.TempDir, and
-// thus is deleted on test cleanup.
-func TempDir(tb testing.TB, clean bool) string {
-	tb.Helper()
-	if clean {
-		return filepath.Join(tb.TempDir(), "sq-test", "tmp")
-	}
-
-	fp := filepath.Join(os.TempDir(), "sq-test", stringz.Uniq8(), "tmp")
-	require.NoError(tb, ioz.RequireDir(fp))
+func randString() string {
+	b := make([]byte, 128)
+	_, _ = rand.Read(b)
+	cs := crc32.ChecksumIEEE(b)
+	return fmt.Sprintf("%x", cs)
+}
+
+var dirCount = &atomic.Int64{}
+
+// TempDir is the standard means for obtaining a temp dir for tests.
+// A new, unique temp dir is returned on each call. If arg subs is
+// non-empty, that sub-directory structure is created within the
+// parent temp dir. The returned value is an absolute path of
+// the form:
+//
+//	# tu.TempDir(t):
+//	/var/folders/68/qthw...0gn/T/sq/test/testh/tu/TestTempDir/69226/2_1706579687990637_f8f226d0
+//
+//	# tu.TempDir(t, "foo", "bar"):
+//	/var/folders/68/qthw...0gn/T/sq/test/testh/tu/TestTempDir/69226/3_1706579687990706_efb99710/foo/bar
+//
+// The returned dir is a subdir of os.TempDir(), and includes the package path
+// and test name (sanitized), as well as the pid, created dir count, timestamp,
+// and random value. We use this structure to make it easier to identify the
+// calling test when debugging. The dir is created with perms 0777.
+//
+// The caller is responsible for removing the dir if desired - it is NOT
+// automatically deleted via t.Cleanup.
+func TempDir(tb testing.TB, subs ...string) string {
+	tb.Helper()
+	dir, err := os.Getwd()
+	require.NoError(tb, err)
+	dir = strings.TrimPrefix(dir, proj.Dir())
+	fp := filepath.Join(
+		os.TempDir(),
+		"sq",
+		"test",
+		dir,
+		stringz.SanitizeFilename(tb.Name()),
+		strconv.Itoa(os.Getpid()),
+		fmt.Sprintf(
+			"%d_%d_%s",
+			dirCount.Add(1),
+			time.Now().UnixMicro(),
+			randString(),
+		))
+
+	for _, sub := range subs {
+		fp = filepath.Join(fp, sub)
+	}
+
+	err = os.MkdirAll(fp, 0o777)
+	require.NoError(tb, err)
 	return fp
 }

 // TempFile returns the path to a temp file with the given name, in a unique
-// temp dir. The file is not created. If arg clean is true, the parent temp
-// dir is created via t.TempDir, and thus is deleted on test cleanup.
-func TempFile(tb testing.TB, name string, clean bool) string {
+// temp dir. The file is not created.
+func TempFile(tb testing.TB, name string) string {
 	tb.Helper()
-	fp := filepath.Join(TempDir(tb, clean), name)
-	return fp
-}
-
-// CacheDir is the standard means for obtaining a cache dir for tests.
-// If arg clean is true, the cache dir is created via t.TempDir, and
-// thus is deleted on test cleanup.
-func CacheDir(tb testing.TB, clean bool) string {
-	tb.Helper()
-	if clean {
-		return filepath.Join(tb.TempDir(), "sq-test", "cache")
-	}
-
-	fp := filepath.Join(os.TempDir(), "sq-test", stringz.Uniq8(), "cache")
-	require.NoError(tb, ioz.RequireDir(fp))
+	fp := filepath.Join(TempDir(tb), name)
 	return fp
 }

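Worth noting about the refactored tu.TempDir: unlike tb.TempDir, it registers no cleanup, so the directory deliberately outlives the test to aid post-mortem debugging. A caller that wants the old auto-delete behavior can add it explicitly; a small sketch written as if inside package tu (TestExample is hypothetical and assumes the os and testing imports):

func TestExample(t *testing.T) {
	// Yields something like:
	//   <os.TempDir()>/sq/test/<pkg-path>/TestExample/<pid>/1_<timestamp>_<rand>/cache
	cacheDir := TempDir(t, "cache")

	t.Cleanup(func() {
		// TempDir does not delete the dir; remove it when the output isn't
		// needed for debugging after the test.
		_ = os.RemoveAll(cacheDir)
	})

	// ... use cacheDir ...
}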
View File

@@ -1,8 +1,12 @@
 package tu

 import (
+	"path/filepath"
+	"strings"
 	"testing"

+	"github.com/neilotoole/sq/libsq/core/lg/lgt"
+
 	"github.com/stretchr/testify/require"
 )
@@ -93,3 +97,22 @@ func TestInterfaceSlice(t *testing.T) {
 		_ = AnySlice(42)
 	}, "should panic for non-slice arg")
 }
+
+func TestTempDir(t *testing.T) {
+	log := lgt.New(t)
+	log.Debug("huzzxah")
+
+	td1 := TempDir(t)
+	t.Logf("td1: %s", td1)
+	require.NotEmpty(t, td1)
+	require.DirExists(t, td1)
+
+	td2 := TempDir(t)
+	t.Logf("td2: %s", td2)
+	require.NotEqual(t, td1, td2)
+
+	td3 := TempDir(t, "foo", "bar")
+	t.Logf("td3: %s", td3)
+	require.True(t, strings.HasSuffix(td3, filepath.Join("foo", "bar")))
+}