mirror of https://github.com/neilotoole/sq.git
Pass context to config.Store (#207)
This commit is contained in:
parent d5b98ea810
commit a766340382
14 CHANGELOG.md
@@ -5,6 +5,20 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
+## Upcoming
+
+This release overhauls `sq`'s config mechanism.
+
+### Added
+
+- `sq config get` prints config. DOCS
+- `sq config location` prints the location of the config dir.
+- `--config` flag is now honored globally.
+
+### Changed
+
+- Envar `SQ_CONFIG` replaces `SQ_CONFIGDIR`.
+
## [v0.33.0] - 2023-04-15
The headline feature is [source groups](https://sq.io/docs/source#groups).
16 cli/cli.go
@@ -281,12 +281,12 @@ func addCmd(rc *RunContext, parentCmd, cmd *cobra.Command) *cobra.Command {
// defaultLogging returns a log (and its associated closer) if
// logging has been enabled via envars.
-func defaultLogging() (*slog.Logger, *cleanup.Cleanup, error) {
+func defaultLogging() (*slog.Logger, slog.Handler, *cleanup.Cleanup, error) {
truncate, _ := strconv.ParseBool(os.Getenv(config.EnvarLogTruncate))
logFilePath, ok := os.LookupEnv(config.EnvarLogPath)
if !ok || logFilePath == "" || strings.TrimSpace(logFilePath) == "" {
-return lg.Discard(), nil, nil
+return lg.Discard(), nil, nil, nil
}
// Let's try to create the dir holding the logfile... if it already exists,
@@ -294,7 +294,7 @@ func defaultLogging() (*slog.Logger, *cleanup.Cleanup, error) {
parent := filepath.Dir(logFilePath)
err := os.MkdirAll(parent, 0o750)
if err != nil {
-return lg.Discard(), nil, errz.Wrapf(err, "failed to create parent dir of log file %s", logFilePath)
+return lg.Discard(), nil, nil, errz.Wrapf(err, "failed to create parent dir of log file %s", logFilePath)
}
fileFlag := os.O_APPEND
@@ -304,7 +304,7 @@ func defaultLogging() (*slog.Logger, *cleanup.Cleanup, error) {
logFile, err := os.OpenFile(logFilePath, os.O_RDWR|os.O_CREATE|fileFlag, 0o600)
if err != nil {
-return lg.Discard(), nil, errz.Wrapf(err, "unable to open log file: %s", logFilePath)
+return lg.Discard(), nil, nil, errz.Wrapf(err, "unable to open log file: %s", logFilePath)
}
clnup := cleanup.New().AddE(logFile.Close)
@@ -317,15 +317,13 @@ func defaultLogging() (*slog.Logger, *cleanup.Cleanup, error) {
return a
}
-opts := slog.HandlerOptions{
+h := slog.HandlerOptions{
AddSource: true,
Level: slog.LevelDebug,
ReplaceAttr: replace,
-}
+}.NewJSONHandler(logFile)
-log := slog.New(opts.NewJSONHandler(logFile))
-return log, clnup, nil
+return slog.New(h), h, clnup, nil
}
// printError is the centralized function for printing
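The hunks above change defaultLogging to return the slog.Handler alongside the logger, so log records buffered during startup can later be flushed into that same handler (see newDefaultRunContext further down). A minimal, self-contained sketch of that shape follows; it uses the standard library log/slog rather than the golang.org/x/exp/slog package this file imports, and the helper name newJSONLogging is illustrative, not from the repo.

package main

import (
	"log/slog"
	"os"
)

// newJSONLogging mirrors the new defaultLogging shape: it returns both the
// *slog.Logger and the slog.Handler it wraps, so the handler can be reused
// elsewhere (e.g. as the destination for a startup log buffer).
func newJSONLogging(f *os.File) (*slog.Logger, slog.Handler) {
	h := slog.NewJSONHandler(f, &slog.HandlerOptions{
		AddSource: true,
		Level:     slog.LevelDebug,
	})
	return slog.New(h), h
}

func main() {
	log, h := newJSONLogging(os.Stderr)
	log.Debug("logging configured")
	_ = h // h would be handed to a log buffer's Flush once real logging is ready.
}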
@@ -51,9 +51,10 @@ func TestSmoke(t *testing.T) {
t.Run(strings.Join(tc.a, "_"), func(t *testing.T) {
t.Parallel()
+ctx := context.Background()
-rc, out, errOut := newTestRunCtx(t, nil)
-err := cli.ExecuteWith(context.Background(), rc, tc.a)
+rc, out, errOut := newTestRunCtx(ctx, t, nil)
+err := cli.ExecuteWith(ctx, rc, tc.a)
// We log sq's output before doing assert, because it reads
// better in testing's output that way.
@@ -141,7 +142,7 @@ func TestOutputRaw(t *testing.T) {
os.RemoveAll(outputPath)
})
-ru := newRun(t, nil).add(*src).hush()
+ru := newRun(th.Context, t, nil).add(*src).hush()
err = ru.Exec("sql", "--raw", "--output="+outputPath, query)
require.NoError(t, err)
@@ -152,7 +153,7 @@ func TestOutputRaw(t *testing.T) {
require.NoError(t, err)
// 2. Now test that stdout also gets the same data
-ru = newRun(t, nil).add(*src).hush()
+ru = newRun(th.Context, t, nil).add(*src).hush()
err = ru.Exec("sql", "--raw", query)
require.NoError(t, err)
require.Equal(t, wantBytes, ru.out.Bytes())
@@ -263,7 +263,7 @@ func execSrcAdd(cmd *cobra.Command, args []string) error {
}
}
-if err = rc.ConfigStore.Save(rc.Config); err != nil {
+if err = rc.ConfigStore.Save(cmd.Context(), rc.Config); err != nil {
return err
}
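This is the pattern repeated across the command handlers in this commit: the cobra command's own context, cmd.Context(), is threaded into ConfigStore.Save instead of the store supplying its own background context. A self-contained sketch of that wiring follows; the config struct and memStore type are stand-ins for illustration, not the repo's actual types.

package main

import (
	"context"
	"fmt"

	"github.com/spf13/cobra"
)

// config and memStore are illustrative stand-ins for sq's Config and its
// context-aware config.Store.
type config struct{ Active string }

type memStore struct{ saved *config }

func (s *memStore) Save(ctx context.Context, cfg *config) error {
	// A real store would honor ctx (deadline/cancellation) while writing.
	if err := ctx.Err(); err != nil {
		return err
	}
	s.saved = cfg
	return nil
}

func main() {
	store := &memStore{}
	cfg := &config{}

	cmd := &cobra.Command{
		Use: "add",
		RunE: func(cmd *cobra.Command, args []string) error {
			cfg.Active = "@demo"
			// The command's context flows into the store, as in execSrcAdd.
			return store.Save(cmd.Context(), cfg)
		},
	}

	// ExecuteContext supplies the context that cmd.Context() returns.
	if err := cmd.ExecuteContext(context.Background()); err != nil {
		fmt.Println("error:", err)
	}
}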
@@ -1,6 +1,7 @@
package cli_test
import (
+"context"
"path/filepath"
"testing"
@@ -83,7 +84,7 @@ func TestCmdAdd(t *testing.T) {
args = append(args, "--driver="+tc.driver)
}
-ru := newRun(t, nil)
+ru := newRun(th.Context, t, nil)
err := ru.Exec(args...)
if tc.wantErr {
require.Error(t, err)
@@ -104,10 +105,11 @@ func TestCmdAdd(t *testing.T) {
// TestCmdAdd_SQLite_Path has additional tests for sqlite paths.
func TestCmdAdd_SQLite_Path(t *testing.T) {
t.Parallel()
+ctx := context.Background()
const h1 = `@s1`
-ru := newRun(t, nil)
+ru := newRun(ctx, t, nil)
require.NoError(t, ru.Exec("add", "-j", "sqlite3://test.db", "-h", h1))
got := ru.BindMap()
@@ -121,44 +123,45 @@ func TestCmdAdd_SQLite_Path(t *testing.T) {
func TestCmdAdd_Active(t *testing.T) {
t.Parallel()
+ctx := context.Background()
const h1, h2, h3, h4 = "@h1", "@h2", "@h3", "@h4"
// Verify that initially there are no sources.
-ru := newRun(t, nil)
+ru := newRun(ctx, t, nil)
require.NoError(t, ru.Exec("ls"))
require.Zero(t, ru.out.Len())
// Add a new source. It should become the active source.
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("add", proj.Abs(sakila.PathCSVActor), "-h", h1))
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("src", "-j"))
m := ru.BindMap()
require.Equal(t, h1, m["handle"])
// Add a second src, without the --active flag. The active src
// should remain h1.
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("add", proj.Abs(sakila.PathCSVActor), "-h", h2))
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("src", "-j"))
m = ru.BindMap()
require.Equal(t, h1, m["handle"], "active source should still be %s", h1)
// Add a third src, this time with the --active flag. The active src
// should become h3.
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("add", proj.Abs(sakila.PathCSVActor), "-h", h3, "--active"))
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("src", "-j"))
m = ru.BindMap()
require.Equal(t, h3, m["handle"], "active source now be %s", h3)
// Same again with a fourth src, but this time using the shorthand -a flag.
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("add", proj.Abs(sakila.PathCSVActor), "-h", h4, "-a"))
-ru = newRun(t, ru)
+ru = newRun(ctx, t, ru)
require.NoError(t, ru.Exec("src", "-j"))
m = ru.BindMap()
require.Equal(t, h4, m["handle"], "active source now be %s", h4)
@@ -62,7 +62,7 @@ func execGroup(cmd *cobra.Command, args []string) error {
return err
}
-if err := rc.ConfigStore.Save(cfg); err != nil {
+if err := rc.ConfigStore.Save(cmd.Context(), cfg); err != nil {
return err
}
@@ -1,6 +1,7 @@
package cli_test
import (
+"context"
"encoding/json"
"os"
"testing"
@@ -58,7 +59,7 @@ func TestCmdInspect(t *testing.T) {
th := testh.New(t)
src := th.Source(tc.handle)
-ru := newRun(t, nil).add(*src)
+ru := newRun(th.Context, t, nil).add(*src)
err := ru.Exec("inspect", "--json")
if tc.wantErr {
@@ -80,11 +81,11 @@ func TestCmdInspectSmoke(t *testing.T) {
th := testh.New(t)
src := th.Source(sakila.SL3)
-ru := newRun(t, nil)
+ru := newRun(th.Context, t, nil)
err := ru.Exec("inspect")
require.Error(t, err, "should fail because no active src")
-ru = newRun(t, nil)
+ru = newRun(th.Context, t, nil)
ru.add(*src) // now have an active src
err = ru.Exec("inspect", "--json")
@@ -98,7 +99,7 @@ func TestCmdInspectSmoke(t *testing.T) {
require.Equal(t, sakila.AllTblsViews(), md.TableNames())
// Try one more source for good measure
-ru = newRun(t, nil)
+ru = newRun(th.Context, t, nil)
src = th.Source(sakila.CSVActor)
ru.add(*src)
@@ -128,10 +129,11 @@ func TestCmdInspect_Stdin(t *testing.T) {
tc := tc
t.Run(tutil.Name(tc.fpath), func(t *testing.T) {
+ctx := context.Background()
f, err := os.Open(tc.fpath) // No need to close f
require.NoError(t, err)
-ru := newRun(t, nil)
+ru := newRun(ctx, t, nil)
ru.rc.Stdin = f
err = ru.Exec("inspect", "--json")
@@ -75,7 +75,7 @@ func execMoveRenameGroup(cmd *cobra.Command, oldGroup, newGroup string) error {
return err
}
-if err = rc.ConfigStore.Save(rc.Config); err != nil {
+if err = rc.ConfigStore.Save(cmd.Context(), rc.Config); err != nil {
return err
}
@@ -105,7 +105,7 @@ func execMoveHandleToGroup(cmd *cobra.Command, oldHandle, newGroup string) error
return err
}
-if err = rc.ConfigStore.Save(rc.Config); err != nil {
+if err = rc.ConfigStore.Save(cmd.Context(), rc.Config); err != nil {
return err
}
@@ -127,7 +127,7 @@ func execMoveRenameHandle(cmd *cobra.Command, oldHandle, newHandle string) error
return err
}
-if err = rc.ConfigStore.Save(rc.Config); err != nil {
+if err = rc.ConfigStore.Save(cmd.Context(), rc.Config); err != nil {
return err
}
@@ -1,6 +1,7 @@
package cli_test
import (
+"context"
"encoding/csv"
"testing"
"time"
@@ -14,14 +15,15 @@ import (
func TestCmdPing(t *testing.T) {
t.Parallel()
+ctx := context.Background()
-err := newRun(t, nil).Exec("ping")
+err := newRun(ctx, t, nil).Exec("ping")
require.Error(t, err, "no active data source")
-err = newRun(t, nil).Exec("ping", "invalid_handle")
+err = newRun(ctx, t, nil).Exec("ping", "invalid_handle")
require.Error(t, err)
-err = newRun(t, nil).Exec("ping", "@not_a_handle")
+err = newRun(ctx, t, nil).Exec("ping", "@not_a_handle")
require.Error(t, err)
var ru *Run
@@ -29,17 +31,17 @@ func TestCmdPing(t *testing.T) {
th := testh.New(t)
src1, src2 := th.Source(sakila.CSVActor), th.Source(sakila.CSVActorNoHeader)
-ru = newRun(t, nil).add(*src1)
+ru = newRun(ctx, t, nil).add(*src1)
err = ru.Exec("ping", "--csv", src1.Handle)
require.NoError(t, err)
checkPingOutputCSV(t, ru, *src1)
-ru = newRun(t, nil).add(*src2)
+ru = newRun(ctx, t, nil).add(*src2)
err = ru.Exec("ping", "--csv", src2.Handle)
require.NoError(t, err)
checkPingOutputCSV(t, ru, *src2)
-ru = newRun(t, nil).add(*src1, *src2)
+ru = newRun(ctx, t, nil).add(*src1, *src2)
err = ru.Exec("ping", "--csv", src1.Handle, src2.Handle)
require.NoError(t, err)
checkPingOutputCSV(t, ru, *src1, *src2)
@@ -75,7 +75,7 @@ func execRemove(cmd *cobra.Command, args []string) error {
}
}
-if err := rc.ConfigStore.Save(cfg); err != nil {
+if err := rc.ConfigStore.Save(cmd.Context(), cfg); err != nil {
return err
}
lo.Uniq(removed)
@@ -13,13 +13,13 @@ func TestCmdRemove(t *testing.T) {
th := testh.New(t)
// 1. Should fail if bad handle
-ru := newRun(t, nil)
+ru := newRun(th.Context, t, nil)
err := ru.Exec("rm", "@not_a_source")
require.Error(t, err)
// 2. Check normal operation
src := th.Source(sakila.SL3)
-ru = newRun(t, nil).add(*src)
+ru = newRun(th.Context, t, nil).add(*src)
// The src we just added should be the active src
activeSrc := ru.rc.Config.Collection.Active()
@@ -75,7 +75,7 @@ func execScratch(cmd *cobra.Command, args []string) error {
}
}
-err = rc.ConfigStore.Save(rc.Config)
+err = rc.ConfigStore.Save(cmd.Context(), rc.Config)
if err != nil {
return err
}
@@ -28,7 +28,7 @@ func TestCmdSLQ_Insert_Create(t *testing.T) {
destTbl := stringz.UniqSuffix(sakila.TblActor + "_copy")
-ru := newRun(t, nil).add(*originSrc)
+ru := newRun(th.Context, t, nil).add(*originSrc)
if destSrc.Handle != originSrc.Handle {
ru.add(*destSrc)
}
@@ -68,7 +68,7 @@ func TestCmdSLQ_Insert(t *testing.T) {
// of it (without data).
tblName := th.CopyTable(true, destSrc, sakila.TblActor, "", false)
-ru := newRun(t, nil).add(*originSrc)
+ru := newRun(th.Context, t, nil).add(*originSrc)
if destSrc.Handle != originSrc.Handle {
ru.add(*destSrc)
}
@@ -92,8 +92,9 @@ func TestCmdSLQ_Insert(t *testing.T) {
func TestCmdSLQ_CSV(t *testing.T) {
t.Parallel()
-src := testh.New(t).Source(sakila.CSVActor)
-ru := newRun(t, nil).add(*src)
+th := testh.New(t)
+src := th.Source(sakila.CSVActor)
+ru := newRun(th.Context, t, nil).add(*src)
err := ru.Exec("slq", "--header=false", "--csv", fmt.Sprintf("%s.data", src.Handle))
require.NoError(t, err)
@@ -105,8 +106,9 @@ func TestCmdSLQ_CSV(t *testing.T) {
func TestCmdSLQ_OutputFlag(t *testing.T) {
t.Parallel()
-src := testh.New(t).Source(sakila.SL3)
-ru := newRun(t, nil).add(*src)
+th := testh.New(t)
+src := th.Source(sakila.SL3)
+ru := newRun(th.Context, t, nil).add(*src)
outputFile, err := os.CreateTemp("", t.Name())
require.NoError(t, err)
@@ -143,7 +145,7 @@ func TestCmdSLQ_Join(t *testing.T) {
th := testh.New(t)
src1, src2 := th.Source(h1), th.Source(h2)
-ru := newRun(t, nil).add(*src1)
+ru := newRun(th.Context, t, nil).add(*src1)
if src2.Handle != src1.Handle {
ru.add(*src2)
}
@@ -168,10 +170,11 @@ func TestCmdSLQ_Join(t *testing.T) {
// TestCmdSLQ_ActiveSrcHandle verifies that source.ActiveHandle is
// interpreted as the active src in a SLQ query.
func TestCmdSLQ_ActiveSrcHandle(t *testing.T) {
-src := testh.New(t).Source(sakila.SL3)
+th := testh.New(t)
+src := th.Source(sakila.SL3)
// 1. Verify that the query works as expected using the actual src handle
-ru := newRun(t, nil).add(*src).hush()
+ru := newRun(th.Context, t, nil).add(*src).hush()
require.Equal(t, src.Handle, ru.rc.Config.Collection.Active().Handle)
err := ru.Exec("slq", "--header=false", "--csv", "@sakila_sl3.actor")
@@ -180,7 +183,7 @@ func TestCmdSLQ_ActiveSrcHandle(t *testing.T) {
require.Equal(t, sakila.TblActorCount, len(recs))
// 2. Verify that it works using source.ActiveHandle as the src handle
-ru = newRun(t, nil).add(*src).hush()
+ru = newRun(th.Context, t, nil).add(*src).hush()
require.Equal(t, src.Handle, ru.rc.Config.Collection.Active().Handle)
err = ru.Exec("slq", "--header=false", "--csv", source.ActiveHandle+".actor")
require.NoError(t, err)
@@ -1,6 +1,7 @@
package cli_test
import (
+"context"
"fmt"
"os"
"strings"
@@ -44,7 +45,7 @@ func TestCmdSQL_Insert(t *testing.T) {
// of it (without data).
tblName := th.CopyTable(true, destSrc, sakila.TblActor, "", false)
-ru := newRun(t, nil).add(*originSrc)
+ru := newRun(th.Context, t, nil).add(*originSrc)
if destSrc.Handle != originSrc.Handle {
ru.add(*destSrc)
}
@@ -92,7 +93,7 @@ func TestCmdSQL_SelectFromUserDriver(t *testing.T) {
th := testh.New(t)
src := th.Source(handle)
-ru := newRun(t, nil).add(*src)
+ru := newRun(th.Context, t, nil).add(*src)
udDefs := testh.DriverDefsFrom(t, testsrc.PathDriverDefPpl, testsrc.PathDriverDefRSS)
for _, udDef := range udDefs {
@@ -146,7 +147,7 @@ func TestCmdSQL_StdinQuery(t *testing.T) {
f, err := os.Open(tc.fpath)
require.NoError(t, err)
-ru := newRun(t, nil).hush()
+ru := newRun(context.Background(), t, nil).hush()
ru.rc.Stdin = f
args := []string{"sql", "--header=false", "SELECT * FROM " + tc.tbl}
@@ -45,7 +45,7 @@ func execSrc(cmd *cobra.Command, args []string) error {
return err
}
-err = rc.ConfigStore.Save(cfg)
+err = rc.ConfigStore.Save(cmd.Context(), cfg)
if err != nil {
return err
}
@@ -24,7 +24,7 @@ func TestCmdTblCopy(t *testing.T) {
srcTblHandle := src.Handle + "." + sakila.TblActor
destTbl1 := stringz.UniqTableName(sakila.TblActor)
-ru1 := newRun(t, nil).add(*src)
+ru1 := newRun(th.Context, t, nil).add(*src)
err := ru1.Exec("tbl", "copy", "--data=false", srcTblHandle, src.Handle+"."+destTbl1)
require.NoError(t, err)
defer th.DropTable(src, destTbl1)
@@ -32,7 +32,7 @@ func TestCmdTblCopy(t *testing.T) {
"should not have copied any rows because --data=false")
// --data=true
-ru2 := newRun(t, nil).add(*src)
+ru2 := newRun(th.Context, t, nil).add(*src)
destTbl2 := stringz.UniqTableName(sakila.TblActor)
err = ru2.Exec("tbl", "copy", "--data=true", srcTblHandle, src.Handle+"."+destTbl2)
require.NoError(t, err)
@@ -68,7 +68,7 @@ func TestCmdTblDrop(t *testing.T) {
require.Equal(t, destTblName, tblMeta.Name)
require.Equal(t, int64(sakila.TblActorCount), tblMeta.RowCount)
-err = newRun(t, nil).add(*src).Exec("tbl", "drop", src.Handle+"."+destTblName)
+err = newRun(th.Context, t, nil).add(*src).Exec("tbl", "drop", src.Handle+"."+destTblName)
require.NoError(t, err)
needsDrop = false
@@ -98,7 +98,7 @@ func TestCmdTblTruncate(t *testing.T) {
require.Equal(t, destTblName, tblMeta.Name)
require.Equal(t, int64(sakila.TblActorCount), tblMeta.RowCount)
-err = newRun(t, nil).add(*src).Exec("tbl", "truncate", src.Handle+"."+destTblName)
+err = newRun(th.Context, t, nil).add(*src).Exec("tbl", "truncate", src.Handle+"."+destTblName)
require.NoError(t, err)
tblMeta, err = th.Open(src).TableMetadata(th.Context, destTblName)
require.NoError(t, err)
@@ -1,6 +1,7 @@
package config_test
import (
+"context"
"os"
"path/filepath"
"testing"
@@ -20,7 +21,7 @@ func TestFileStore_Nil_Save(t *testing.T) {
var f *config.YAMLFileStore
// noinspection GoNilness
-err := f.Save(config.New())
+err := f.Save(context.Background(), config.New())
require.Error(t, err)
}
@@ -31,7 +32,7 @@ func TestFileStore_LoadSaveLoad(t *testing.T) {
fs := &config.YAMLFileStore{Path: "testdata/good.01.sq.yml", HookLoad: hookExpand}
const expectGood01SrcCount = 34
-cfg, err := fs.Load()
+cfg, err := fs.Load(context.Background())
require.NoError(t, err)
require.NotNil(t, cfg)
require.NotNil(t, cfg.Collection)
@@ -44,10 +45,10 @@ func TestFileStore_LoadSaveLoad(t *testing.T) {
fs.Path = f.Name()
t.Logf("writing to tmp file: %s", fs.Path)
-err = fs.Save(cfg)
+err = fs.Save(context.Background(), cfg)
require.NoError(t, err)
-cfg2, err := fs.Load()
+cfg2, err := fs.Load(context.Background())
require.NoError(t, err)
require.NotNil(t, cfg2)
require.Equal(t, expectGood01SrcCount, len(cfg2.Collection.Sources()))
@@ -77,7 +78,7 @@ func TestFileStore_Load(t *testing.T) {
t.Parallel()
fs.Path = match
-_, err = fs.Load()
+_, err = fs.Load(context.Background())
require.NoError(t, err, match)
})
}
@@ -86,7 +87,7 @@ func TestFileStore_Load(t *testing.T) {
match := match
t.Run(tutil.Name(match), func(t *testing.T) {
fs.Path = match
-_, err = fs.Load()
+_, err = fs.Load(context.Background())
require.Error(t, err, match)
})
}
@@ -1,12 +1,16 @@
package config
import (
+"context"
"fmt"
"io"
"os"
"path/filepath"
"strings"
+"github.com/neilotoole/sq/libsq/core/lg"
+"github.com/neilotoole/sq/libsq/core/lg/lga"
"github.com/neilotoole/sq/libsq/core/ioz"
"github.com/neilotoole/sq/cli/flag"
@@ -23,10 +27,10 @@ import (
// Store saves and loads config.
type Store interface {
// Save writes config to the store.
-Save(cfg *Config) error
+Save(ctx context.Context, cfg *Config) error
// Load reads config from the store.
-Load() (*Config, error)
+Load(ctx context.Context) (*Config, error)
// Location returns the location of the store, typically
// a file path.
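The interface change above is the heart of the commit: both Store methods now accept a context, so cancellation and the context-carried logger reach config I/O. A self-contained sketch of an implementation against that shape follows; the Config struct and memoryStore type are illustrative stand-ins, not types from the repo.

package main

import (
	"context"
	"fmt"
	"sync"
)

// Config is a stand-in for sq's config.Config.
type Config struct {
	Version string
}

// Store mirrors the new context-aware interface.
type Store interface {
	Save(ctx context.Context, cfg *Config) error
	Load(ctx context.Context) (*Config, error)
	Location() string
}

// memoryStore is a toy in-memory Store, the kind of thing a test might use.
type memoryStore struct {
	mu  sync.Mutex
	cfg *Config
}

func (s *memoryStore) Save(ctx context.Context, cfg *Config) error {
	if err := ctx.Err(); err != nil {
		return err // respect cancellation before doing any work
	}
	s.mu.Lock()
	defer s.mu.Unlock()
	c := *cfg
	s.cfg = &c
	return nil
}

func (s *memoryStore) Load(ctx context.Context) (*Config, error) {
	if err := ctx.Err(); err != nil {
		return nil, err
	}
	s.mu.Lock()
	defer s.mu.Unlock()
	if s.cfg == nil {
		return &Config{}, nil
	}
	c := *s.cfg
	return &c, nil
}

func (s *memoryStore) Location() string { return "memory" }

func main() {
	ctx := context.Background()
	var st Store = &memoryStore{}
	_ = st.Save(ctx, &Config{Version: "v0.34.0"})
	cfg, _ := st.Load(ctx)
	fmt.Println(st.Location(), cfg.Version)
}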
@@ -71,7 +75,10 @@ func (fs *YAMLFileStore) Location() string {
}
// Load reads config from disk. It implements Store.
-func (fs *YAMLFileStore) Load() (*Config, error) {
+func (fs *YAMLFileStore) Load(ctx context.Context) (*Config, error) {
+log := lg.FromContext(ctx)
+log.Debug("Loading config from file", lga.Path, fs.Path)
+
if fs.upgradeReg == nil {
// Use the package-level registry by default.
// This is not ideal, but test code can change this
@@ -85,28 +92,28 @@ func (fs *YAMLFileStore) Load() (*Config, error) {
}
if mightNeedUpgrade {
-_, err = fs.UpgradeConfig(foundVers, buildinfo.Version)
-if err != nil {
+log.Info("Upgrade config?", lga.From, foundVers, lga.To, buildinfo.Version)
+if _, err = fs.UpgradeConfig(ctx, foundVers, buildinfo.Version); err != nil {
return nil, err
}
// We do a cycle of loading and saving the config after the upgrade,
// because the upgrade may have written YAML via a map, which
// doesn't preserve order. Loading and saving should fix that.
-cfg, err := fs.doLoad()
+cfg, err := fs.doLoad(ctx)
if err != nil {
return nil, errz.Wrapf(err, "config: %s: load failed after config upgrade", fs.Path)
}
-if err = fs.Save(cfg); err != nil {
+if err = fs.Save(ctx, cfg); err != nil {
return nil, errz.Wrapf(err, "config: %s: save failed after config upgrade", fs.Path)
}
}
-return fs.doLoad()
+return fs.doLoad(ctx)
}
-func (fs *YAMLFileStore) doLoad() (*Config, error) {
+func (fs *YAMLFileStore) doLoad(ctx context.Context) (*Config, error) {
bytes, err := os.ReadFile(fs.Path)
if err != nil {
return nil, errz.Wrapf(err, "config: failed to load file: %s", fs.Path)
@@ -132,7 +139,7 @@ func (fs *YAMLFileStore) doLoad() (*Config, error) {
if err != nil {
if repaired {
// The config was repaired. Save the changes.
-err = errz.Combine(err, fs.Save(cfg))
+err = errz.Combine(err, fs.Save(ctx, cfg))
}
return nil, errz.Wrapf(err, "config: %s", fs.Path)
}
@@ -208,7 +215,7 @@ func (fs *YAMLFileStore) loadExt(cfg *Config) error {
}
// Save writes config to disk. It implements Store.
-func (fs *YAMLFileStore) Save(cfg *Config) error {
+func (fs *YAMLFileStore) Save(_ context.Context, cfg *Config) error {
if fs == nil {
return errz.New("config file store is nil")
}
@@ -251,12 +258,12 @@ type DiscardStore struct{}
var _ Store = (*DiscardStore)(nil)
// Load returns a new empty Config.
-func (DiscardStore) Load() (*Config, error) {
+func (DiscardStore) Load(context.Context) (*Config, error) {
return New(), nil
}
// Save is no-op.
-func (DiscardStore) Save(*Config) error {
+func (DiscardStore) Save(context.Context, *Config) error {
return nil
}
@@ -267,7 +274,7 @@ func (DiscardStore) Location() string {
// DefaultLoad loads sq config from the default location
// (~/.config/sq/sq.yml) or the location specified in envars.
-func DefaultLoad(osArgs []string) (*Config, Store, error) {
+func DefaultLoad(ctx context.Context, osArgs []string) (*Config, Store, error) {
var (
cfgDir string
origin string
@@ -300,7 +307,7 @@ func DefaultLoad(osArgs []string) (*Config, Store, error) {
}
// file does exist, let's try to load it
-cfg, err := cfgStore.Load()
+cfg, err := cfgStore.Load(ctx)
if err != nil {
return nil, nil, err
}
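DefaultLoad now takes the context first, and Load pulls its logger from that context via lg.FromContext. A self-contained sketch of the context-carried-logger pattern follows; the newContext/fromContext helpers and loadConfig function are local stand-ins mirroring what the repo's lg package appears to provide, and the sketch uses the standard library log/slog rather than golang.org/x/exp/slog.

package main

import (
	"context"
	"log/slog"
	"os"
)

type ctxKey struct{}

// newContext returns a copy of ctx carrying log, mirroring lg.NewContext.
func newContext(ctx context.Context, log *slog.Logger) context.Context {
	return context.WithValue(ctx, ctxKey{}, log)
}

// fromContext returns the logger stored in ctx, or a default logger,
// mirroring lg.FromContext.
func fromContext(ctx context.Context) *slog.Logger {
	if log, ok := ctx.Value(ctxKey{}).(*slog.Logger); ok {
		return log
	}
	return slog.Default()
}

// loadConfig is an illustrative loader in the shape of config.DefaultLoad:
// it never stores a logger itself, it just uses the one on the context.
func loadConfig(ctx context.Context, path string) error {
	fromContext(ctx).Debug("Loading config from file", "path", path)
	return nil // a real loader would read and unmarshal the file here
}

func main() {
	log := slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelDebug}))
	ctx := newContext(context.Background(), log)
	_ = loadConfig(ctx, "~/.config/sq/sq.yml")
}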
@@ -1,10 +1,14 @@
package config
import (
+"context"
"fmt"
"os"
"strings"
+"github.com/neilotoole/sq/libsq/core/lg"
+"github.com/neilotoole/sq/libsq/core/lg/lga"
"github.com/neilotoole/sq/cli/buildinfo"
"github.com/neilotoole/sq/libsq/core/errz"
"golang.org/x/mod/semver"
@@ -50,12 +54,14 @@ func init() { //nolint:gochecknoinits
// UpgradeConfig runs all the registered upgrade funcs between cfg.Version
// and targetVersion. Typically this is checked by Load, but can be
// explicitly invoked for testing etc.
-func (fs *YAMLFileStore) UpgradeConfig(startVersion, targetVersion string) (*Config, error) {
+func (fs *YAMLFileStore) UpgradeConfig(ctx context.Context, startVersion, targetVersion string) (*Config, error) {
if !semver.IsValid(targetVersion) {
return nil, errz.Errorf("invalid semver for config version {%s}", targetVersion)
}
-// fs.log.Debugf("Starting config upgrade: %s --> %s", cfg.Version, targetVersion)
+log := lg.FromContext(ctx)
+log.Debug("Starting config upgrade", lga.From, startVersion, lga.To, targetVersion)
var err error
upgradeFns := fs.upgradeReg.getUpgradeFuncs(startVersion, targetVersion)
@@ -67,15 +73,15 @@ func (fs *YAMLFileStore) UpgradeConfig(startVersion, targetVersion string) (*Con
}
// Do a final update of the version
-cfg, err := fs.doLoad()
+cfg, err := fs.doLoad(ctx)
if err != nil {
return nil, err
}
cfg.Version = targetVersion
-// fs.log.Debugf("Setting config_version to: %s", targetVersion)
+log.Debug("Setting config_version", lga.Val, targetVersion)
-err = fs.Save(cfg)
+err = fs.Save(ctx, cfg)
if err != nil {
return nil, err
}
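UpgradeConfig walks the registered upgrade funcs between two versions, now logging via the context-carried logger instead of a logger field on the store. Below is a self-contained sketch of that version-walk idea; the registry layout, the upgradeFunc signature, and the helper names are illustrative guesses, not the repo's actual upgrade registry.

package main

import (
	"context"
	"fmt"
	"sort"

	"golang.org/x/mod/semver"
)

// upgradeFunc transforms raw config data from the previous version.
type upgradeFunc func(ctx context.Context, before []byte) (after []byte, err error)

// registry maps the version an upgrade step produces to its upgrade func.
var registry = map[string]upgradeFunc{
	"v0.34.0": func(ctx context.Context, before []byte) ([]byte, error) {
		// A real func would rewrite config keys here.
		return before, nil
	},
}

// upgradeFuncsBetween returns the funcs for versions in (start, target],
// in ascending semver order, roughly the job getUpgradeFuncs appears to do.
func upgradeFuncsBetween(start, target string) []upgradeFunc {
	var vers []string
	for v := range registry {
		if semver.Compare(v, start) > 0 && semver.Compare(v, target) <= 0 {
			vers = append(vers, v)
		}
	}
	sort.Slice(vers, func(i, j int) bool { return semver.Compare(vers[i], vers[j]) < 0 })
	fns := make([]upgradeFunc, len(vers))
	for i, v := range vers {
		fns[i] = registry[v]
	}
	return fns
}

func main() {
	fns := upgradeFuncsBetween("v0.33.0", "v0.34.0")
	fmt.Println("upgrade steps to run:", len(fns))
}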
@@ -1,12 +1,16 @@
package config_test
import (
+"context"
"os"
"path/filepath"
"strings"
"testing"
"time"
+"github.com/neilotoole/slogt"
+"github.com/neilotoole/sq/libsq/core/lg"
"github.com/neilotoole/sq/cli/buildinfo"
"github.com/neilotoole/sq/libsq/core/ioz"
@@ -30,6 +34,9 @@ func setBuildVersion(t testing.TB, vers string) {
}
func Test_Upgrade_v0_34_0(t *testing.T) {
+log := slogt.New(t)
+ctx := lg.NewContext(context.Background(), log)
+
const (
prevVers = "v0.33.0"
nextVers = "v0.34.0"
@@ -52,7 +59,7 @@ func Test_Upgrade_v0_34_0(t *testing.T) {
t.Logf("config file (before): %s", cfgFile)
_ = ioz.FPrintFile(tutil.Writer(t), cfgFile)
-cfg, cfgStore, err := config.DefaultLoad(nil)
+cfg, cfgStore, err := config.DefaultLoad(ctx, nil)
require.NoError(t, err)
t.Logf("config file (after): %s", cfgFile)
@@ -27,7 +27,8 @@ import (
// The srcs args are added to rc.Config.Collection.
//
// If cfgStore is nil, a new one is created in a temp dir.
-func newTestRunCtx(t testing.TB, cfgStore config.Store) (rc *cli.RunContext, out, errOut *bytes.Buffer) {
+func newTestRunCtx(ctx context.Context, t testing.TB, cfgStore config.Store,
+) (rc *cli.RunContext, out, errOut *bytes.Buffer) {
out = &bytes.Buffer{}
errOut = &bytes.Buffer{}
@@ -39,9 +40,9 @@ func newTestRunCtx(t testing.TB, cfgStore config.Store) (rc *cli.RunContext, out
require.NoError(t, err)
cfgStore = &config.YAMLFileStore{Path: filepath.Join(cfgDir, "sq.yml")}
cfg = config.New()
-require.NoError(t, cfgStore.Save(cfg))
+require.NoError(t, cfgStore.Save(ctx, cfg))
} else {
-cfg, err = cfgStore.Load()
+cfg, err = cfgStore.Load(ctx)
require.NoError(t, err)
}
@@ -73,13 +74,13 @@ type Run struct {
// newRun returns a new run instance for testing sq commands.
// If from is non-nil, its config is used. This allows sequential
// commands to use the same config.
-func newRun(t *testing.T, from *Run) *Run {
+func newRun(ctx context.Context, t *testing.T, from *Run) *Run {
ru := &Run{t: t}
var cfgStore config.Store
if from != nil {
cfgStore = from.rc.ConfigStore
}
-ru.rc, ru.out, ru.errOut = newTestRunCtx(t, cfgStore)
+ru.rc, ru.out, ru.errOut = newTestRunCtx(ctx, t, cfgStore)
return ru
}
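With these helper changes, every test constructs its context up front and threads it through newRun; when one run should see the sources added by a previous run, it is seeded from that run's config store. A minimal, self-contained sketch of that chaining pattern follows, with stand-in types rather than the repo's Run or config.Store.

package main

import (
	"context"
	"fmt"
)

// store and run are illustrative stand-ins for config.Store and the test Run type.
type store struct{ sources []string }

type run struct {
	ctx context.Context
	st  *store
}

// newRun mirrors the helper's shape: it takes a ctx, and if from is non-nil
// it reuses from's store so sequential runs share config.
func newRun(ctx context.Context, from *run) *run {
	st := &store{}
	if from != nil {
		st = from.st
	}
	return &run{ctx: ctx, st: st}
}

func (r *run) add(src string) *run {
	r.st.sources = append(r.st.sources, src)
	return r
}

func main() {
	ctx := context.Background()
	ru := newRun(ctx, nil).add("@csv_actor")
	ru = newRun(ctx, ru) // the second run sees the first run's source
	fmt.Println(ru.st.sources)
}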
@@ -7,6 +7,8 @@ import (
"path/filepath"
"sync"
+"github.com/neilotoole/sq/libsq/core/lg/slogbuf"
+
"github.com/neilotoole/sq/cli/config"
"github.com/neilotoole/sq/cli/flag"
"github.com/neilotoole/sq/drivers/csv"
@@ -114,20 +116,25 @@ type RunContext struct {
// example if there's a config error). We do this to provide
// enough framework so that such an error can be logged or
// printed per the normal mechanisms if at all possible.
-func newDefaultRunContext(_ context.Context, stdin *os.File,
+func newDefaultRunContext(ctx context.Context, stdin *os.File,
stdout, stderr io.Writer, args []string,
) (*RunContext, error) {
+// logbuf holds log records until defaultLogging is completed.
+log, logbuf := slogbuf.New()
+
rc := &RunContext{
Stdin: stdin,
Out: stdout,
ErrOut: stderr,
}
-cfg, cfgStore, configErr := config.DefaultLoad(args)
+cfg, cfgStore, configErr := config.DefaultLoad(lg.NewContext(ctx, log), args)
rc.ConfigStore = cfgStore
rc.Config = cfg
-log, clnup, loggingErr := defaultLogging()
+log, logHandler, clnup, loggingErr := defaultLogging()
+
+_ = logbuf.Flush(ctx, logHandler)
rc.Log = log
rc.clnup = clnup
@@ -13,6 +13,8 @@ import (
"context"
"sync"
+"github.com/neilotoole/sq/libsq/core/errz"
+
"golang.org/x/exp/slog"
)
@@ -50,6 +52,10 @@ func (b *Buffer) append(h *handler, record slog.Record) {
// log records to dest, Flush returns immediately (and does not write
// any remaining records). The buffer drains, even if an error occurs.
func (b *Buffer) Flush(ctx context.Context, dest slog.Handler) error {
+if dest == nil {
+return errz.New("flush log buffer: dest is nil")
+}
+
b.mu.Lock()
defer b.mu.Unlock()
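Taken together with newDefaultRunContext above, the idea is: log records emitted before real logging is configured go to an in-memory buffer, and once the JSON handler exists the buffer is flushed into it. Below is a small, self-contained sketch of that bootstrap pattern; it is not the repo's slogbuf package, it uses the standard library log/slog, and it deliberately simplifies the handler by ignoring WithAttrs/WithGroup state.

package main

import (
	"context"
	"log/slog"
	"os"
	"sync"
)

// bufHandler buffers records until Flush is called. Simplified:
// WithAttrs/WithGroup return the same handler, dropping that state.
type bufHandler struct {
	mu   sync.Mutex
	recs []slog.Record
}

func (h *bufHandler) Enabled(context.Context, slog.Level) bool { return true }

func (h *bufHandler) Handle(_ context.Context, r slog.Record) error {
	h.mu.Lock()
	defer h.mu.Unlock()
	h.recs = append(h.recs, r.Clone())
	return nil
}

func (h *bufHandler) WithAttrs([]slog.Attr) slog.Handler { return h }
func (h *bufHandler) WithGroup(string) slog.Handler      { return h }

// Flush replays buffered records to dest and drains the buffer.
func (h *bufHandler) Flush(ctx context.Context, dest slog.Handler) error {
	h.mu.Lock()
	defer h.mu.Unlock()
	var firstErr error
	for _, r := range h.recs {
		if err := dest.Handle(ctx, r); err != nil && firstErr == nil {
			firstErr = err
		}
	}
	h.recs = nil
	return firstErr
}

func main() {
	ctx := context.Background()

	// Bootstrap phase: nothing is configured yet, so log into the buffer.
	buf := &bufHandler{}
	log := slog.New(buf)
	log.Debug("config loading started") // buffered, not yet written anywhere

	// Real logging becomes available later; flush the early records into it.
	dest := slog.NewJSONHandler(os.Stderr, &slog.HandlerOptions{Level: slog.LevelDebug})
	_ = buf.Flush(ctx, dest)
}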
@@ -238,7 +238,7 @@ func (h *Helper) Source(handle string) *source.Source {
// method also uses a cache. This is because this
// method makes a copy the data file of file-based sources
// as mentioned in the method godoc.
-h.coll = mustLoadCollection(t)
+h.coll = mustLoadCollection(h.Context, t)
h.srcCache = map[string]*source.Source{}
}
@@ -691,14 +691,14 @@ func (h *Helper) DiffDB(src *source.Source) {
})
}
-func mustLoadCollection(t testing.TB) *source.Collection {
+func mustLoadCollection(ctx context.Context, t testing.TB) *source.Collection {
hookExpand := func(data []byte) ([]byte, error) {
// expand vars such as "${SQ_ROOT}"
return []byte(proj.Expand(string(data))), nil
}
fs := &config.YAMLFileStore{Path: proj.Rel(testsrc.PathSrcsConfig), HookLoad: hookExpand}
-cfg, err := fs.Load()
+cfg, err := fs.Load(ctx)
require.NoError(t, err)
require.NotNil(t, cfg)
require.NotNil(t, cfg.Collection)