Improvements to source commands (#139)

* Expose source.Set.Data() method

* jsonw.writeJSON cleaned up

* sq add now respects --json

* Location strings are subject to more scrutiny

* Ignore .db files in project dir

* sq add is more restrictive about location string

* source.RedactedLocation now uses 'xxxxx' per stdlib url.URL.Redacted()

* Update changelog for v0.23.0

* typos
This commit is contained in:
Neil O'Toole 2022-12-31 20:17:44 -07:00 committed by GitHub
parent 1293f4a22b
commit ed9aa38a67
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
44 changed files with 612 additions and 222 deletions

2
.gitignore vendored
View File

@ -50,3 +50,5 @@ goreleaser-test.sh
/.vscode
/completions
/manpages
/cli/test.db
/*.db

View File

@ -23,6 +23,9 @@ run:
# This package is such a mess, and needs to be rewritten completely.
- cli/output/tablew/internal
# Non-committed scratch dir
- scratch
output:
sort-results: true

View File

@ -5,14 +5,37 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [v0.21.3] - 2022-12-30
## [v0.23.0] - 2022-12-31
### Added
- `sq ls` now respects `--json` flag.
- `sq rm` now respects `--json` flag.
- `sq add` now respects `--json` flag.
- CI pipeline now verifies install packages after publish.
### Changed
- `sq rm` can delete multiple sources.
- `sq rm` doesn't print output unless `--verbose`.
- Redacted snippet is now `xxxxx` instead of `****`, to match stdlib `url.URL.Redacted()`.
### Fixed
- Fixed crash on Fedora systems (needed `--tags=netgo`).
## [v0.21.3] - 2022-12-30
### Added
- `sq version` respects `--json` flag.
- `sq version` respects `--verbose` flag. It also shows less info when `-v` is not set.
- `sq version` respects `--verbose` flag.
- `sq version` shows `latest_version` info when `--verbose` and there's a newer version available.
### Changed
- `sq version` shows less info when `--verbose` is not set.
## [v0.20.0] - 2022-12-29
### Added
@ -100,6 +123,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- [#89]: Bug with SQL generated for joins.
[v0.23.0]: https://github.com/neilotoole/sq/compare/v0.21.3...v0.23.0
[v0.21.3]: https://github.com/neilotoole/sq/compare/v0.20.0...v0.21.3
[v0.20.0]: https://github.com/neilotoole/sq/compare/v0.19.0...v0.20.0
[v0.19.0]: https://github.com/neilotoole/sq/compare/v0.18.2...v0.19.0

View File

@ -590,6 +590,7 @@ func newWriters(log lg.Log, cmd *cobra.Command, defaults config.Defaults, out, e
// No format specified, use JSON
w.recordw = jsonw.NewStdRecordWriter(out2, fm)
w.metaw = jsonw.NewMetadataWriter(out2, fm)
w.srcw = jsonw.NewSourceWriter(out2, fm)
w.errw = jsonw.NewErrorWriter(log, errOut2, fm)
w.versionw = jsonw.NewVersionWriter(out2, fm)

View File

@ -141,7 +141,7 @@ func TestOutputRaw(t *testing.T) {
})
ru := newRun(t).add(*src).hush()
err = ru.exec("sql", "--raw", "--output="+outputPath, query)
err = ru.Exec("sql", "--raw", "--output="+outputPath, query)
require.NoError(t, err)
outputBytes, err := os.ReadFile(outputPath)
@ -152,7 +152,7 @@ func TestOutputRaw(t *testing.T) {
// 2. Now test that stdout also gets the same data
ru = newRun(t).add(*src)
err = ru.exec("sql", "--raw", query)
err = ru.Exec("sql", "--raw", query)
require.NoError(t, err)
require.Equal(t, wantBytes, ru.out.Bytes())
})

View File

@ -21,13 +21,13 @@ import (
func newSrcAddCmd() *cobra.Command {
cmd := &cobra.Command{
Use: "add [--handle @HANDLE] [FLAGS] LOCATION",
Use: "add [--handle @HANDLE] LOCATION",
RunE: execSrcAdd,
Args: cobra.ExactArgs(1),
Short: "Add data source",
Long: `Add data source specified by LOCATION, and optionally identified by @HANDLE.
`,
Example: `When adding a data source, LOCATION is the only required arg.
Long: `Add data source specified by LOCATION, optionally identified by @HANDLE.`,
Example: `
When adding a data source, LOCATION is the only required arg.
# Add a postgres source with handle "@sakila_pg"
$ sq add -h @sakila_pg 'postgres://user:pass@localhost/sakila'
@ -94,10 +94,6 @@ More examples:
$ sq add 'postgres://user@localhost/sakila' -p
Password: ****
# Explicitly set flags
$ sq add --handle=@sakila_pg --driver=postgres 'postgres://user:pass@localhost/sakila'
@ -126,21 +122,19 @@ More examples:
cmd.Flags().StringP(flagHandle, flagHandleShort, "", flagHandleUsage)
cmd.Flags().BoolP(flagPasswordPrompt, flagPasswordPromptShort, false, flagPasswordPromptUsage)
cmd.Flags().Bool(flagSkipVerify, false, flagSkipVerifyUsage)
cmd.Flags().BoolP(flagJSON, flagJSONShort, false, flagJSONUsage)
return cmd
}
func execSrcAdd(cmd *cobra.Command, args []string) error {
rc := RunContextFrom(cmd.Context())
if len(args) != 1 {
return errz.Errorf(msgInvalidArgs)
}
cfg := rc.Config
loc := source.AbsLocation(strings.TrimSpace(args[0]))
var err error
var typ source.Type
if cmd.Flags().Changed(flagDriver) {
if cmdFlagChanged(cmd, flagDriver) {
val, _ := cmd.Flags().GetString(flagDriver)
typ = source.Type(strings.TrimSpace(val))
} else {
@ -158,7 +152,7 @@ func execSrcAdd(cmd *cobra.Command, args []string) error {
}
var handle string
if cmd.Flags().Changed(flagHandle) {
if cmdFlagChanged(cmd, flagHandle) {
handle, _ = cmd.Flags().GetString(flagHandle)
} else {
handle, err = source.SuggestHandle(typ, loc, cfg.Sources.Exists)
@ -181,7 +175,7 @@ func execSrcAdd(cmd *cobra.Command, args []string) error {
}
var opts options.Options
if cmd.Flags().Changed(flagSrcOptions) {
if cmdFlagChanged(cmd, flagSrcOptions) {
val, _ := cmd.Flags().GetString(flagSrcOptions)
val = strings.TrimSpace(val)
if val != "" {
@ -192,17 +186,11 @@ func execSrcAdd(cmd *cobra.Command, args []string) error {
}
}
// Special handling for SQLite, because it's a file-based SQL DB
// unlike the other SQL DBs sq supports so far.
// Both of these forms are allowed:
//
// $ sq add sqlite3:///path/to/sakila.db
// $ sq add /path/to/sakila.db
//
// The second form is particularly nice for bash completion etc.
if typ == sqlite3.Type {
if !strings.HasPrefix(loc, sqlite3.Prefix) {
loc = sqlite3.Prefix + loc
// Special handling for SQLite, because it's a file-based DB.
loc, err = sqlite3.MungeLocation(loc)
if err != nil {
return err
}
}

View File

@ -1,6 +1,7 @@
package cli_test
import (
"path/filepath"
"testing"
"github.com/neilotoole/sq/testh/tutil"
@ -79,7 +80,7 @@ func TestCmdAdd(t *testing.T) {
}
ru := newRun(t)
err := ru.exec(args...)
err := ru.Exec(args...)
if tc.wantErr {
require.Error(t, err)
return
@ -95,3 +96,18 @@ func TestCmdAdd(t *testing.T) {
})
}
}
// TestCmdAdd_SQLite_Path has additional tests for sqlite paths.
// It verifies that a relative location ("sqlite3://test.db") is
// expanded to an absolute "sqlite3://" URL in the saved source,
// as reported by the JSON output of "sq add -j".
func TestCmdAdd_SQLite_Path(t *testing.T) {
const h1 = `@s1`
ru := newRun(t)
require.NoError(t, ru.Exec("add", "-j", "sqlite3://test.db", "-h", h1))
// BindMap unmarshals the command's JSON output.
got := ru.BindMap()
absPath, err := filepath.Abs("test.db")
require.NoError(t, err)
wantLoc := "sqlite3://" + absPath
require.Equal(t, wantLoc, got["location"])
}

View File

@ -34,7 +34,6 @@ func newDriverListCmd() *cobra.Command {
cmd.Flags().BoolP(flagJSON, flagJSONShort, false, flagJSONUsage)
cmd.Flags().BoolP(flagTable, flagTableShort, false, flagTableUsage)
cmd.Flags().BoolP(flagHeader, flagHeaderShort, false, flagHeaderUsage)
cmd.Flags().BoolP(flagMonochrome, flagMonochromeShort, false, flagMonochromeUsage)
return cmd
}

View File

@ -41,7 +41,6 @@ If @HANDLE is not provided, the active data source is assumed.`,
cmd.Flags().BoolP(flagJSON, flagJSONShort, false, flagJSONUsage)
cmd.Flags().BoolP(flagTable, flagTableShort, false, flagTableUsage)
cmd.Flags().Bool(flagInspectFull, false, flagInspectFullUsage)
return cmd
}
@ -135,9 +134,9 @@ func execInspect(cmd *cobra.Command, args []string) error {
return errz.Wrapf(err, "failed to read %s source metadata", src.Handle)
}
// This is a bit hacky, but it works... if not "--full", then just zap
// This is a bit hacky, but it works... if not "--verbose", then just zap
// the DBVars, as we usually don't want to see those
if !cmd.Flags().Changed(flagInspectFull) {
if !cmdFlagTrue(cmd, flagVerbose) {
meta.DBVars = nil
}

View File

@ -60,7 +60,7 @@ func TestCmdInspect(t *testing.T) {
ru := newRun(t).add(*src)
err := ru.exec("inspect", "--json")
err := ru.Exec("inspect", "--json")
if tc.wantErr {
require.Error(t, err)
return
@ -81,20 +81,20 @@ func TestCmdInspectSmoke(t *testing.T) {
src := th.Source(sakila.SL3)
ru := newRun(t)
err := ru.exec("inspect")
err := ru.Exec("inspect")
require.Error(t, err, "should fail because no active src")
ru = newRun(t)
ru.add(*src) // now have an active src
err = ru.exec("inspect", "--json")
err = ru.Exec("inspect", "--json")
require.NoError(t, err, "should pass because there is an active src")
md := &source.Metadata{}
require.NoError(t, json.Unmarshal(ru.out.Bytes(), md))
require.Equal(t, sqlite3.Type, md.SourceType)
require.Equal(t, sakila.SL3, md.Handle)
require.Equal(t, src.Location, md.Location)
require.Equal(t, src.RedactedLocation(), md.Location)
require.Equal(t, sakila.AllTblsViews(), md.TableNames())
// Try one more source for good measure
@ -102,7 +102,7 @@ func TestCmdInspectSmoke(t *testing.T) {
src = th.Source(sakila.CSVActor)
ru.add(*src)
err = ru.exec("inspect", "--json", src.Handle)
err = ru.Exec("inspect", "--json", src.Handle)
require.NoError(t, err)
md = &source.Metadata{}
@ -134,7 +134,7 @@ func TestCmdInspect_Stdin(t *testing.T) {
ru := newRun(t)
ru.rc.Stdin = f
err = ru.exec("inspect", "--json")
err = ru.Exec("inspect", "--json")
if tc.wantErr {
require.Error(t, err)
return

View File

@ -14,6 +14,7 @@ func newSrcListCmd() *cobra.Command {
}
cmd.Flags().BoolP(flagHeader, flagHeaderShort, false, flagHeaderUsage)
cmd.Flags().BoolP(flagJSON, flagJSONShort, false, flagJSONUsage)
return cmd
}

View File

@ -15,39 +15,39 @@ import (
func TestCmdPing(t *testing.T) {
t.Parallel()
err := newRun(t).exec("ping")
err := newRun(t).Exec("ping")
require.Error(t, err, "no active data source")
err = newRun(t).exec("ping", "invalid_handle")
err = newRun(t).Exec("ping", "invalid_handle")
require.Error(t, err)
err = newRun(t).exec("ping", "@not_a_handle")
err = newRun(t).Exec("ping", "@not_a_handle")
require.Error(t, err)
var ru *run
var ru *Run
th := testh.New(t)
src1, src2 := th.Source(sakila.CSVActor), th.Source(sakila.CSVActorNoHeader)
ru = newRun(t).add(*src1)
err = ru.exec("ping", "--csv", src1.Handle)
err = ru.Exec("ping", "--csv", src1.Handle)
require.NoError(t, err)
checkPingOutputCSV(t, ru, *src1)
ru = newRun(t).add(*src2)
err = ru.exec("ping", "--csv", src2.Handle)
err = ru.Exec("ping", "--csv", src2.Handle)
require.NoError(t, err)
checkPingOutputCSV(t, ru, *src2)
ru = newRun(t).add(*src1, *src2)
err = ru.exec("ping", "--csv", src1.Handle, src2.Handle)
err = ru.Exec("ping", "--csv", src1.Handle, src2.Handle)
require.NoError(t, err)
checkPingOutputCSV(t, ru, *src1, *src2)
}
// checkPingOutputCSV reads CSV records from h.out, and verifies
// that there's an appropriate record for each of srcs.
func checkPingOutputCSV(t *testing.T, h *run, srcs ...source.Source) {
func checkPingOutputCSV(t *testing.T, h *Run, srcs ...source.Source) {
recs, err := csv.NewReader(h.out).ReadAll()
require.NoError(t, err)
require.Equal(t, len(srcs), len(recs))

View File

@ -1,45 +1,54 @@
package cli
import (
"fmt"
"github.com/neilotoole/sq/libsq/source"
"github.com/samber/lo"
"github.com/spf13/cobra"
)
func newSrcRemoveCmd() *cobra.Command {
cmd := &cobra.Command{
Use: "rm @HANDLE",
Example: ` $ sq rm @my1`,
Use: "rm @HANDLE1 [@HANDLE2...]",
Example: ` # Remove @my1 data source
$ sq rm @my1
# Remove multiple data sources
$ sq rm @my1 @pg1 @sqlserver1`,
Short: "Remove data source",
Args: cobra.ExactArgs(1),
Long: "Remove data source.",
Args: cobra.MinimumNArgs(1),
RunE: execSrcRemove,
ValidArgsFunction: completeHandle(1),
ValidArgsFunction: completeHandle(0),
}
cmd.Flags().BoolP(flagJSON, flagJSONShort, false, flagJSONUsage)
return cmd
}
func execSrcRemove(cmd *cobra.Command, args []string) error {
rc := RunContextFrom(cmd.Context())
cfg := rc.Config
src, err := cfg.Sources.Get(args[0])
if err != nil {
args = lo.Uniq(args)
srcs := make([]*source.Source, len(args))
for i := range args {
src, err := cfg.Sources.Get(args[i])
if err != nil {
return err
}
err = cfg.Sources.Remove(src.Handle)
if err != nil {
return err
}
srcs[i] = src
}
if err := rc.ConfigStore.Save(cfg); err != nil {
return err
}
err = cfg.Sources.Remove(src.Handle)
if err != nil {
return err
}
err = rc.ConfigStore.Save(cfg)
if err != nil {
return err
}
fmt.Fprintf(rc.Out, "Removed data source ")
_, _ = rc.writers.fm.Hilite.Fprintf(rc.Out, "%s", src.Handle)
fmt.Fprintln(rc.Out)
return nil
return rc.writers.srcw.Removed(srcs...)
}

View File

@ -14,7 +14,7 @@ func TestCmdRemove(t *testing.T) {
// 1. Should fail if bad handle
ru := newRun(t)
err := ru.exec("rm", "@not_a_source")
err := ru.Exec("rm", "@not_a_source")
require.Error(t, err)
// 2. Check normal operation
@ -26,7 +26,7 @@ func TestCmdRemove(t *testing.T) {
require.NotNil(t, activeSrc)
require.Equal(t, src.Handle, activeSrc.Handle)
err = ru.exec("rm", src.Handle)
err = ru.Exec("rm", src.Handle)
require.NoError(t, err)
activeSrc = ru.rc.Config.Sources.Active()

View File

@ -88,6 +88,10 @@ func execSLQ(cmd *cobra.Command, args []string) error {
return errz.Wrapf(err, "invalid --%s value", flagInsert)
}
if destTbl == "" {
return errz.Errorf("invalid value for --%s: must be @HANDLE.TABLE", flagInsert)
}
destSrc, err := srcs.Get(destHandle)
if err != nil {
return err

View File

@ -37,7 +37,7 @@ func TestCmdSLQ_Insert_Create(t *testing.T) {
cols := stringz.PrefixSlice(sakila.TblActorCols(), ".")
query := fmt.Sprintf("%s.%s | %s", originSrc.Handle, srcTbl, strings.Join(cols, ", "))
err := ru.exec("slq", "--insert="+insertTo, query)
err := ru.Exec("slq", "--insert="+insertTo, query)
require.NoError(t, err)
sink, err := th.QuerySQL(destSrc, "select * from "+destTbl)
@ -77,7 +77,7 @@ func TestCmdSLQ_Insert(t *testing.T) {
cols := stringz.PrefixSlice(sakila.TblActorCols(), ".")
query := fmt.Sprintf("%s.%s | %s", originSrc.Handle, srcTbl, strings.Join(cols, ", "))
err := ru.exec("slq", "--insert="+insertTo, query)
err := ru.Exec("slq", "--insert="+insertTo, query)
require.NoError(t, err)
sink, err := th.QuerySQL(destSrc, "select * from "+tblName)
@ -94,7 +94,7 @@ func TestCmdSLQ_CSV(t *testing.T) {
src := testh.New(t).Source(sakila.CSVActor)
ru := newRun(t).add(*src)
err := ru.exec("slq", "--header=false", "--csv", fmt.Sprintf("%s.data", src.Handle))
err := ru.Exec("slq", "--header=false", "--csv", fmt.Sprintf("%s.data", src.Handle))
require.NoError(t, err)
recs := ru.mustReadCSV()
@ -115,7 +115,7 @@ func TestCmdSLQ_OutputFlag(t *testing.T) {
assert.NoError(t, os.Remove(outputFile.Name()))
})
err = ru.exec("slq",
err = ru.Exec("slq",
"--header=false", "--csv", fmt.Sprintf("%s.%s", src.Handle, sakila.TblActor),
"--output", outputFile.Name())
require.NoError(t, err)
@ -150,7 +150,7 @@ func TestCmdSLQ_Join(t *testing.T) {
query := fmt.Sprintf(queryTpl, src1.Handle, src2.Handle, sakila.MillerCustID)
err := ru.exec("slq", "--header=false", "--csv", query)
err := ru.Exec("slq", "--header=false", "--csv", query)
require.NoError(t, err)
recs := ru.mustReadCSV()
@ -174,7 +174,7 @@ func TestCmdSLQ_ActiveSrcHandle(t *testing.T) {
ru := newRun(t).add(*src).hush()
require.Equal(t, src.Handle, ru.rc.Config.Sources.Active().Handle)
err := ru.exec("slq", "--header=false", "--csv", "@sakila_sl3.actor")
err := ru.Exec("slq", "--header=false", "--csv", "@sakila_sl3.actor")
require.NoError(t, err)
recs := ru.mustReadCSV()
require.Equal(t, sakila.TblActorCount, len(recs))
@ -182,7 +182,7 @@ func TestCmdSLQ_ActiveSrcHandle(t *testing.T) {
// 2. Verify that it works using source.ActiveHandle as the src handle
ru = newRun(t).add(*src).hush()
require.Equal(t, src.Handle, ru.rc.Config.Sources.Active().Handle)
err = ru.exec("slq", "--header=false", "--csv", source.ActiveHandle+".actor")
err = ru.Exec("slq", "--header=false", "--csv", source.ActiveHandle+".actor")
require.NoError(t, err)
recs = ru.mustReadCSV()
require.Equal(t, sakila.TblActorCount, len(recs))

View File

@ -25,7 +25,7 @@ source's SQL dialect. Use flag --src=@HANDLE to specify an alternative
source.
If flag --query is set, sq will run the input as a query
(SELECT) and return the query rows. If flag --exec is set,
(SELECT) and return the query rows. If flag --exec is set,
sq will execute the input and return the result. If neither
flag is set, sq attempts to determine the appropriate mode.`,
RunE: execSQL,
@ -36,7 +36,7 @@ flag is set, sq attempts to determine the appropriate mode.`,
$ sq sql --src=@sakila_pg12 'SELECT * FROM actor'
# Drop table @sakila_pg12.actor
$ sq sql --exec --src=@sakila_pg12 'DROP TABLE actor'
$ sq sql --exec --src=@sakila_pg12 'DROP TABLE actor'
# Select from active source and write results to @sakila_ms17.actor
$ sq sql 'SELECT * FROM actor' --insert=@sakila_ms17.actor`,

View File

@ -52,7 +52,7 @@ func TestCmdSQL_Insert(t *testing.T) {
insertTo := fmt.Sprintf("%s.%s", destSrc.Handle, tblName)
query := fmt.Sprintf("SELECT %s FROM %s", strings.Join(sakila.TblActorCols(), ", "), originTbl)
err := ru.exec("sql", "--insert="+insertTo, query)
err := ru.Exec("sql", "--insert="+insertTo, query)
require.NoError(t, err)
sink, err := th.QuerySQL(destSrc, "select * from "+tblName)
@ -100,7 +100,7 @@ func TestCmdSQL_SelectFromUserDriver(t *testing.T) {
}
ru.rc.Config.Ext.UserDrivers = append(ru.rc.Config.Ext.UserDrivers, udDefs...)
err := ru.exec("sql", "--csv", "--header=false", "SELECT * FROM "+wantTbl.tblName)
err := ru.Exec("sql", "--csv", "--header=false", "SELECT * FROM "+wantTbl.tblName)
require.NoError(t, err)
recs := ru.mustReadCSV()
require.Equal(t, wantTbl.wantRows, len(recs), "expected %d rows in tbl %q but got %s", wantTbl.wantRows,
@ -152,7 +152,7 @@ func TestCmdSQL_StdinQuery(t *testing.T) {
args = append(args, "--opts=header=true")
}
err = ru.exec(args...)
err = ru.Exec(args...)
if tc.wantErr {
require.Error(t, err)
return

View File

@ -20,6 +20,8 @@ func newSrcCommand() *cobra.Command {
source. Otherwise, set @HANDLE as the active data source.`,
}
cmd.Flags().BoolP(flagJSON, flagJSONShort, false, flagJSONUsage)
return cmd
}

View File

@ -25,7 +25,7 @@ func TestCmdTblCopy(t *testing.T) {
destTbl1 := stringz.UniqTableName(sakila.TblActor)
ru1 := newRun(t).add(*src)
err := ru1.exec("tbl", "copy", "--data=false", srcTblHandle, src.Handle+"."+destTbl1)
err := ru1.Exec("tbl", "copy", "--data=false", srcTblHandle, src.Handle+"."+destTbl1)
require.NoError(t, err)
defer th.DropTable(src, destTbl1)
require.Equal(t, int64(0), th.RowCount(src, destTbl1),
@ -34,7 +34,7 @@ func TestCmdTblCopy(t *testing.T) {
// --data=true
ru2 := newRun(t).add(*src)
destTbl2 := stringz.UniqTableName(sakila.TblActor)
err = ru2.exec("tbl", "copy", "--data=true", srcTblHandle, src.Handle+"."+destTbl2)
err = ru2.Exec("tbl", "copy", "--data=true", srcTblHandle, src.Handle+"."+destTbl2)
require.NoError(t, err)
defer th.DropTable(src, destTbl2)
require.Equal(t, int64(sakila.TblActorCount), th.RowCount(src, destTbl2),
@ -68,7 +68,7 @@ func TestCmdTblDrop(t *testing.T) {
require.Equal(t, destTblName, tblMeta.Name)
require.Equal(t, int64(sakila.TblActorCount), tblMeta.RowCount)
err = newRun(t).add(*src).exec("tbl", "drop", src.Handle+"."+destTblName)
err = newRun(t).add(*src).Exec("tbl", "drop", src.Handle+"."+destTblName)
require.NoError(t, err)
needsDrop = false
@ -98,7 +98,7 @@ func TestCmdTblTruncate(t *testing.T) {
require.Equal(t, destTblName, tblMeta.Name)
require.Equal(t, int64(sakila.TblActorCount), tblMeta.RowCount)
err = newRun(t).add(*src).exec("tbl", "truncate", src.Handle+"."+destTblName)
err = newRun(t).add(*src).Exec("tbl", "truncate", src.Handle+"."+destTblName)
require.NoError(t, err)
tblMeta, err = th.Open(src).TableMetadata(th.Context, destTblName)
require.NoError(t, err)

View File

@ -74,18 +74,18 @@ func fetchBrewVersion(ctx context.Context) (string, error) {
resp, err := http.DefaultClient.Do(req)
if err != nil {
return "", errz.Wrap(err, "failed to check edgectl brew repo")
return "", errz.Wrap(err, "failed to check sq brew repo")
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return "", errz.Errorf("failed to check edgectl brew repo: %d %s",
return "", errz.Errorf("failed to check sq brew repo: %d %s",
resp.StatusCode, http.StatusText(resp.StatusCode))
}
body, err := io.ReadAll(resp.Body)
if err != nil {
return "", errz.Wrap(err, "failed to read edgectl brew repo body")
return "", errz.Wrap(err, "failed to read sq brew repo body")
}
return getVersionFromBrewFormula(body)

View File

@ -4,6 +4,7 @@ import (
"context"
"strings"
"github.com/samber/lo"
"github.com/spf13/cobra"
"github.com/neilotoole/sq/libsq/source"
@ -30,6 +31,8 @@ func completeHandle(max int) completionFunc {
rc := RunContextFrom(cmd.Context())
handles := rc.Config.Sources.Handles()
handles, _ = lo.Difference(handles, args)
return handles, cobra.ShellCompDirectiveNoFileComp
}
}

View File

@ -29,9 +29,6 @@ const (
flagInsert = "insert"
flagInsertUsage = "Insert query results into @HANDLE.TABLE. If not existing, TABLE will be created."
flagInspectFull = "full"
flagInspectFullUsage = "Output full data source details (JSON only)"
flagJSON = "json"
flagJSONUsage = "Output JSON"
flagJSONShort = "j"
@ -67,7 +64,7 @@ const (
flagRawShort = "r"
flagRawUsage = "Output each record field in raw format without any encoding or delimiter"
flagSQLExec = "exec"
flagSQLExec = "exec"
flagSQLExecUsage = "Execute the SQL as a statement (as opposed to query)"
flagSQLQuery = "query"

View File

@ -3,7 +3,6 @@ package jsonw
import (
"bytes"
"fmt"
"io"
"strings"
@ -17,9 +16,7 @@ import (
// writeJSON prints a JSON representation of v to out, using specs
// from fm.
func writeJSON(out io.Writer, fm *output.Formatting, v any) error {
buf := &bytes.Buffer{}
enc := jcolorenc.NewEncoder(buf)
enc := jcolorenc.NewEncoder(out)
enc.SetColors(internal.NewColors(fm))
enc.SetEscapeHTML(false)
if fm.Pretty {
@ -31,11 +28,6 @@ func writeJSON(out io.Writer, fm *output.Formatting, v any) error {
return errz.Err(err)
}
_, err = fmt.Fprint(out, buf.String())
if err != nil {
return errz.Err(err)
}
return nil
}

View File

@ -1,14 +1,9 @@
package jsonw
import (
"bytes"
"fmt"
"io"
"github.com/neilotoole/sq/cli/output"
"github.com/neilotoole/sq/cli/output/jsonw/internal"
jcolorenc "github.com/neilotoole/sq/cli/output/jsonw/internal/jcolorenc"
"github.com/neilotoole/sq/libsq/core/errz"
"github.com/neilotoole/sq/libsq/driver"
"github.com/neilotoole/sq/libsq/source"
)
@ -25,40 +20,20 @@ func NewMetadataWriter(out io.Writer, fm *output.Formatting) output.MetadataWrit
return &mdWriter{out: out, fm: fm}
}
func (w *mdWriter) write(v any) error {
buf := &bytes.Buffer{}
enc := jcolorenc.NewEncoder(buf)
enc.SetColors(internal.NewColors(w.fm))
enc.SetEscapeHTML(false)
if w.fm.Pretty {
enc.SetIndent("", w.fm.Indent)
}
err := enc.Encode(v)
if err != nil {
return errz.Err(err)
}
_, err = fmt.Fprint(w.out, buf.String())
if err != nil {
return errz.Err(err)
}
return nil
}
// DriverMetadata implements output.MetadataWriter.
func (w *mdWriter) DriverMetadata(md []driver.Metadata) error {
return w.write(md)
return writeJSON(w.out, w.fm, md)
}
// TableMetadata implements output.MetadataWriter.
func (w *mdWriter) TableMetadata(md *source.TableMetadata) error {
return w.write(md)
return writeJSON(w.out, w.fm, md)
}
// SourceMetadata implements output.MetadataWriter.
func (w *mdWriter) SourceMetadata(md *source.Metadata) error {
return w.write(md)
md2 := *md // Shallow copy is fine
md2.Location = source.RedactLocation(md2.Location)
return writeJSON(w.out, w.fm, &md2)
}

View File

@ -0,0 +1,61 @@
package jsonw
import (
"io"
"github.com/neilotoole/sq/cli/output"
"github.com/neilotoole/sq/libsq/source"
)
var _ output.SourceWriter = (*sourceWriter)(nil)
type sourceWriter struct {
out io.Writer
fm *output.Formatting
}
// NewSourceWriter returns a source writer that outputs source
// details in JSON format.
func NewSourceWriter(out io.Writer, fm *output.Formatting) output.SourceWriter {
return &sourceWriter{out: out, fm: fm}
}
// SourceSet implements output.SourceWriter.
func (w *sourceWriter) SourceSet(ss *source.Set) error {
if ss == nil {
return nil
}
ss = ss.Clone()
items := ss.Items()
for i := range items {
items[i].Location = items[i].RedactedLocation()
}
return writeJSON(w.out, w.fm, ss.Data())
}
// Source implements output.SourceWriter.
func (w *sourceWriter) Source(src *source.Source) error {
if src == nil {
return nil
}
src = src.Clone()
src.Location = src.RedactedLocation()
return writeJSON(w.out, w.fm, src)
}
// Removed implements output.SourceWriter.
func (w *sourceWriter) Removed(srcs ...*source.Source) error {
if !w.fm.Verbose || len(srcs) == 0 {
return nil
}
srcs2 := make([]*source.Source, len(srcs))
for i := range srcs {
srcs2[i] = srcs[i].Clone()
srcs2[i].Location = srcs2[i].RedactedLocation()
}
return writeJSON(w.out, w.fm, srcs2)
}

View File

@ -116,6 +116,24 @@ func (w *sourceWriter) Source(src *source.Source) error {
return nil
}
// Removed implements output.SourceWriter. It prints a single
// "Removed: @h1, @h2" line listing the handles of the removed
// sources. Output is only produced in verbose mode; otherwise
// this is a no-op.
func (w *sourceWriter) Removed(srcs ...*source.Source) error {
if !w.verbose || len(srcs) == 0 {
return nil
}
fmt.Fprintf(w.tbl.out, "Removed: ")
for i, src := range srcs {
if i > 0 {
w.tbl.fm.Faint.Fprint(w.tbl.out, ", ")
}
w.tbl.fm.Handle.Fprint(w.tbl.out, src.Handle)
}
fmt.Fprintln(w.tbl.out)
return nil
}
func renderSrcOptions(src *source.Source) string {
if src == nil || src.Options == nil || len(src.Options) == 0 {
return ""

View File

@ -16,7 +16,7 @@ import (
)
// RecordWriter is an interface for writing records to a destination.
// In effect it is a synchronous counterpart to the asynchronous
// In effect, it is a synchronous counterpart to the asynchronous
// libsq.RecordWriter interface. Being a synchronous interface, it is
// less tricky to implement than libsq.RecordWriter. The RecordWriterAdapter
// type defined in this package bridges the two interfaces.
@ -60,6 +60,9 @@ type SourceWriter interface {
// Source outputs details of the source.
Source(src *source.Source) error
// Removed is called when sources are removed from the source set.
Removed(srcs ...*source.Source) error
}
// ErrorWriter outputs errors.

View File

@ -4,6 +4,7 @@ import (
"bytes"
"context"
"encoding/csv"
"encoding/json"
"os"
"strings"
"sync"
@ -15,7 +16,6 @@ import (
"github.com/neilotoole/sq/cli"
"github.com/neilotoole/sq/cli/config"
"github.com/neilotoole/sq/libsq/core/errz"
"github.com/neilotoole/sq/libsq/source"
)
@ -41,7 +41,7 @@ func newTestRunCtx(log lg.Log) (rc *cli.RunContext, out, errOut *bytes.Buffer) {
}
// run is a helper for testing sq commands.
type run struct {
type Run struct {
t *testing.T
mu sync.Mutex
rc *cli.RunContext
@ -54,8 +54,8 @@ type run struct {
}
// newRun returns a new run instance for testing sq commands.
func newRun(t *testing.T) *run {
ru := &run{t: t}
func newRun(t *testing.T) *Run {
ru := &Run{t: t}
ru.rc, ru.out, ru.errOut = newTestRunCtx(testlg.New(t))
return ru
}
@ -63,7 +63,7 @@ func newRun(t *testing.T) *run {
// add adds srcs to ru.rc.Config.Set. If the source set
// does not already have an active source, the first element
// of srcs is used.
func (ru *run) add(srcs ...source.Source) *run {
func (ru *Run) add(srcs ...source.Source) *Run {
ru.mu.Lock()
defer ru.mu.Unlock()
@ -87,24 +87,32 @@ func (ru *run) add(srcs ...source.Source) *run {
return ru
}
// exec executes the sq command specified by args. If the first
// Exec executes the sq command specified by args. If the first
// element of args is not "sq", that value is prepended to the
// args for execution. This method may only be invoked once.
// The backing RunContext will also be closed.
func (ru *run) exec(args ...string) error {
// The backing RunContext will also be closed. If an error
// occurs on the client side during execution, that error is returned.
// Either ru.out or ru.errOut will be filled, according to what the
// CLI outputs.
func (ru *Run) Exec(args ...string) error {
ru.mu.Lock()
defer ru.mu.Unlock()
if ru.used {
err := errz.New("run instance must only be used once")
ru.t.Fatal(err)
return err
}
return ru.doExec(args)
}
execErr := cli.ExecuteWith(context.Background(), ru.rc, args)
func (ru *Run) doExec(args []string) error {
defer func() { ru.used = true }()
require.False(ru.t, ru.used, "Run instance must only be used once")
ctx, cancelFn := context.WithCancel(context.Background())
ru.t.Cleanup(cancelFn)
execErr := cli.ExecuteWith(ctx, ru.rc, args)
if !ru.hushOutput {
// We log sq's output now (before calling rc.Close) because
// We log the CLI's output now (before calling rc.Close) because
// it reads better in testing's output that way.
if ru.out.Len() > 0 {
ru.t.Log(strings.TrimSuffix(ru.out.String(), "\n"))
@ -124,9 +132,27 @@ func (ru *run) exec(args ...string) error {
return closeErr
}
// Bind unmarshals Run.Out (as JSON) into v, failing the test on any error.
func (ru *Run) Bind(v any) *Run {
ru.mu.Lock()
defer ru.mu.Unlock()
err := json.Unmarshal(ru.out.Bytes(), &v)
require.NoError(ru.t, err)
return ru
}
// BindMap is a convenience method for binding ru.Out to a map.
func (ru *Run) BindMap() map[string]any {
m := map[string]any{}
ru.Bind(&m)
return m
}
// mustReadCSV reads CSV from ru.out and returns all records,
// failing the testing on any problem.
func (ru *run) mustReadCSV() [][]string {
// failing the test on any problem. Obviously the Exec call
// should have specified "--csv".
func (ru *Run) mustReadCSV() [][]string {
ru.mu.Lock()
defer ru.mu.Unlock()
@ -138,7 +164,7 @@ func (ru *run) mustReadCSV() [][]string {
// hush suppresses the printing of output collected in out
// and errOut to t.Log. Set to true for tests
// that output excessive content, binary files, etc.
func (ru *run) hush() *run {
func (ru *Run) hush() *Run {
ru.hushOutput = true
return ru
}

View File

@ -8,6 +8,7 @@ import (
"context"
"database/sql"
"fmt"
"net/url"
"os"
"path/filepath"
"strings"
@ -843,3 +844,40 @@ func PathFromLocation(src *source.Source) (string, error) {
loc = filepath.Clean(loc)
return loc, nil
}
// MungeLocation takes a location argument (as received from the user)
// and builds a sqlite3 location URL. Each of these forms is allowed:
//
// sqlite3:///path/to/sakila.db --> sqlite3:///path/to/sakila.db
// sqlite3:sakila.db --> sqlite3:///current/working/dir/sakila.db
// sqlite3:/sakila.db --> sqlite3:///sakila.db
// sqlite3:./sakila.db --> sqlite3:///current/working/dir/sakila.db
// sqlite3://sakila.db --> sqlite3:///current/working/dir/sakila.db
// sakila.db --> sqlite3:///current/working/dir/sakila.db
// /path/to/sakila.db --> sqlite3:///path/to/sakila.db
//
// The final form is particularly nice for shell completion etc.
func MungeLocation(loc string) (string, error) {
loc2 := strings.TrimSpace(loc)
if loc2 == "" {
return "", errz.New("location must not be empty")
}
// Trim "sqlite3://" before "sqlite3:": trimming the shorter prefix
// first would leave a dangling "//" on the longer form.
loc2 = strings.TrimPrefix(loc2, "sqlite3://")
loc2 = strings.TrimPrefix(loc2, "sqlite3:")
// Now we should be left with just a path, which could be
// relative or absolute.
u, err := url.Parse(loc2)
if err != nil {
return "", errz.Wrapf(err, "invalid location: %s", loc)
}
// Resolve the path against the current working directory.
u.Path, err = filepath.Abs(u.Path)
if err != nil {
return "", errz.Wrapf(err, "invalid location: %s", loc)
}
u.Scheme = "sqlite3"
return u.String(), nil
}

View File

@ -1,10 +1,12 @@
package sqlite3_test
import (
"os"
"path/filepath"
"testing"
_ "github.com/mattn/go-sqlite3"
"github.com/neilotoole/sq/testh/tutil"
"github.com/stretchr/testify/require"
"github.com/neilotoole/sq/drivers/sqlite3"
@ -223,3 +225,62 @@ func TestPathFromLocation(t *testing.T) {
})
}
}
// TestMungeLocation verifies that sqlite3.MungeLocation normalizes the
// various accepted location forms into canonical sqlite3 URLs.
func TestMungeLocation(t *testing.T) {
	cwd, err := os.Getwd()
	require.NoError(t, err)
	// Relative forms all resolve against the current working dir.
	absWant := "sqlite3://" + cwd + "/sakila.db"

	cases := []struct {
		in      string
		want    string
		wantErr bool
	}{
		{in: "", wantErr: true},
		{in: "sqlite3:///path/to/sakila.db", want: "sqlite3:///path/to/sakila.db"},
		{in: "sqlite3://sakila.db", want: absWant},
		{in: "sqlite3:sakila.db", want: absWant},
		{in: "sqlite3:/sakila.db", want: "sqlite3:///sakila.db"},
		{in: "sakila.db", want: absWant},
		{in: "./sakila.db", want: absWant},
		{in: "/path/to/sakila.db", want: "sqlite3:///path/to/sakila.db"},
	}

	for _, tc := range cases {
		tc := tc
		t.Run(tutil.Name(tc.in), func(t *testing.T) {
			got, gotErr := sqlite3.MungeLocation(tc.in)
			if tc.wantErr {
				require.Error(t, gotErr)
				return
			}
			require.NoError(t, gotErr)
			require.Equal(t, tc.want, got)
		})
	}
}

1
go.mod
View File

@ -56,6 +56,7 @@ require (
github.com/muesli/mango-pflag v0.1.0 // indirect
github.com/rivo/uniseg v0.4.3 // indirect
github.com/rogpeppe/go-internal v1.9.0 // indirect
github.com/samber/lo v1.37.0 // indirect
golang.org/x/crypto v0.4.0 // indirect
golang.org/x/exp v0.0.0-20221217163422-3c43f8badb15 // indirect
)

2
go.sum
View File

@ -233,6 +233,8 @@ github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQD
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryboe/q v1.0.18 h1:uTonPt1eZjy7GSpB0XpYpsCvX+Yf9f+M4CUKuH2r+vg=
github.com/ryboe/q v1.0.18/go.mod h1:elqvVf/GBuZHvZ9gvHv4MKM6NZAMz2rFajnTgQZ46wU=
github.com/samber/lo v1.37.0 h1:XjVcB8g6tgUp8rsPsJ2CvhClfImrpL04YpQHXeHPhRw=
github.com/samber/lo v1.37.0/go.mod h1:9vaz2O4o8oOnK23pd2TrXufcbdbJIa3b6cstBWKpopA=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/seccomp/libseccomp-golang v0.9.2-0.20220502022130-f33da4d89646/go.mod h1:JA8cRccbGaA1s33RQf7Y1+q9gHmZX1yB/z9WDN1C6fg=
github.com/segmentio/encoding v0.1.14 h1:BfnglNbNRohLaBLf93uP5/IwKqeWrezXK/g6IRnj75c=

View File

@ -19,6 +19,10 @@ import (
"github.com/neilotoole/sq/libsq/core/errz"
)
// Redacted is the "xxxxx" string used for redacted
// values, such as passwords. The text matches that produced
// by stdlib url.URL.Redacted.
const Redacted = "xxxxx"

func init() { //nolint:gochecknoinits
	// Seed math/rand for this package's random helpers.
	// NOTE(review): rand.Seed is deprecated as of Go 1.20; if the module
	// moves to Go 1.20+, this seeding becomes unnecessary — confirm.
	rand.Seed(time.Now().UnixNano())
}

View File

@ -7,7 +7,6 @@ import (
"net/url"
"os"
"path/filepath"
"strings"
"sync"
"time"
@ -497,33 +496,6 @@ func DetectMagicNumber(ctx context.Context, log lg.Log, openFn FileOpenFunc) (de
}
}
// AbsLocation returns the absolute path of loc. That is, relative
// paths etc. are resolved. If loc is not a file path or
// it cannot be processed, loc is returned unmodified.
func AbsLocation(loc string) string {
	if fpath, ok := isFpath(loc); ok {
		return fpath
	}
	return loc
}
// isFpath returns the absolute filepath and true if loc is a file path.
func isFpath(loc string) (fpath string, ok bool) {
	// This is not exactly an industrial-strength algorithm...
	if strings.Contains(loc, ":/") {
		// Excludes "http:/" etc
		return "", false
	}
	if strings.Contains(loc, ":") {
		// Excludes scheme-style locations such as "sqlite3:my_file.db",
		// which would otherwise be misclassified as relative file paths.
		return "", false
	}
	fpath, err := filepath.Abs(loc)
	if err != nil {
		return "", false
	}
	return fpath, true
}
// httpURL tests if s is a well-structured HTTP or HTTPS url, and
// if so, returns the url and true.
func httpURL(s string) (u *url.URL, ok bool) {

View File

@ -285,3 +285,35 @@ func parseLoc(loc string) (*parsedLoc, error) {
return ploc, nil
}
// AbsLocation returns the absolute path of loc. That is, relative
// paths etc. are resolved. If loc is not a file path or
// it cannot be processed, loc is returned unmodified.
func AbsLocation(loc string) string {
	fpath, ok := isFpath(loc)
	if !ok {
		// Not a plain file path: hand back the original unchanged.
		return loc
	}
	return fpath
}
// isFpath returns the absolute filepath and true if loc is a file path.
func isFpath(loc string) (fpath string, ok bool) {
	// Crude heuristic: any location containing a colon is assumed to
	// carry a scheme — "http:/...", "sqlite3:my_file.db" — and thus is
	// not a plain file path. (The ":" test subsumes the ":/" test that
	// URLs would otherwise need.)
	if strings.ContainsRune(loc, ':') {
		return "", false
	}

	abs, err := filepath.Abs(loc)
	if err != nil {
		return "", false
	}
	return abs, true
}

View File

@ -12,6 +12,10 @@ type Metadata struct {
// Handle is the source handle.
Handle string `json:"handle"`
// Location is the source location such as a DB connection string,
// a file path, or a URL.
Location string `json:"location"`
// Name is the base name of the source, e.g. the base filename
// or DB name etc. For example, "sakila".
Name string `json:"name"`
@ -33,13 +37,6 @@ type Metadata struct {
// DBVersion is the DB version.
DBVersion string `json:"db_version"`
// DBVars are configuration name-value pairs from the DB.
DBVars []DBVar `json:"db_variables,omitempty"`
// Location is the source location such as a DB connection string,
// a file path, or a URL.
Location string `json:"location"`
// User is the username, if applicable.
User string `json:"user,omitempty"`
@ -48,6 +45,44 @@ type Metadata struct {
// Tables is the metadata for each table loc the source.
Tables []*TableMetadata `json:"tables"`
// DBVars are configuration name-value pairs from the DB.
DBVars []DBVar `json:"db_variables,omitempty"`
}
// Clone returns a deep copy of md. If md is nil, nil is returned.
func (md *Metadata) Clone() *Metadata {
	if md == nil {
		return md
	}

	c := &Metadata{
		Handle:       md.Handle,
		Location:     md.Location,
		Name:         md.Name,
		FQName:       md.FQName,
		SourceType:   md.SourceType,
		DBDriverType: md.DBDriverType,
		DBProduct:    md.DBProduct,
		DBVersion:    md.DBVersion,
		User:         md.User,
		Size:         md.Size,
		Tables:       nil,
		DBVars:       nil,
	}

	if md.DBVars != nil {
		// FIX: the destination slice must be allocated before copy;
		// copying into a nil slice copies zero elements, so the clone's
		// DBVars was silently left empty.
		c.DBVars = make([]DBVar, len(md.DBVars))
		copy(c.DBVars, md.DBVars)
	}

	if md.Tables != nil {
		c.Tables = make([]*TableMetadata, len(md.Tables))
		for i := range md.Tables {
			c.Tables[i] = md.Tables[i].Clone()
		}
	}

	return c
}
// TableNames is a convenience method that returns md's table names.
@ -107,6 +142,33 @@ func (t *TableMetadata) String() string {
return string(bytes)
}
// Clone returns a deep copy of t. If t is nil, nil is returned.
func (t *TableMetadata) Clone() *TableMetadata {
	if t == nil {
		return nil
	}

	clone := &TableMetadata{
		Name:        t.Name,
		FQName:      t.FQName,
		TableType:   t.TableType,
		DBTableType: t.DBTableType,
		RowCount:    t.RowCount,
		Size:        t.Size,
		Comment:     t.Comment,
	}

	if t.Columns == nil {
		// Preserve nilness of the Columns slice in the clone.
		return clone
	}

	clone.Columns = make([]*ColMetadata, len(t.Columns))
	for i, col := range t.Columns {
		clone.Columns[i] = col.Clone()
	}
	return clone
}
// Column returns the named col or nil.
func (t *TableMetadata) Column(colName string) *ColMetadata {
for _, col := range t.Columns {
@ -145,6 +207,25 @@ type ColMetadata struct {
// TODO: Add foreign key field
}
// Clone returns a deep copy of c. If c is nil, nil is returned.
func (c *ColMetadata) Clone() *ColMetadata {
	if c == nil {
		return nil
	}

	// All copied fields are plain values, so a field-by-field copy
	// is a deep copy.
	clone := ColMetadata{
		Name:         c.Name,
		Position:     c.Position,
		PrimaryKey:   c.PrimaryKey,
		BaseType:     c.BaseType,
		ColumnType:   c.ColumnType,
		Kind:         c.Kind,
		Nullable:     c.Nullable,
		DefaultValue: c.DefaultValue,
		Comment:      c.Comment,
	}
	return &clone
}
func (c *ColMetadata) String() string {
bytes, _ := json.Marshal(c)
return string(bytes)

View File

@ -22,14 +22,33 @@ type Set struct {
}
// setData holds Set's data for the purposes of serialization
// to YAML etc. (we don't want to expose setData's exported
// fields directly on Set.)
//
// This seemed like a good idea at the time, but probably wasn't.
type setData struct {
	ActiveSrc string `yaml:"active" json:"active"`
	ScratchSrc string `yaml:"scratch" json:"scratch"`
	Items []*Source `yaml:"items" json:"items"`
}
// Data returns the internal representation of the set data.
// This is a filthy hack so that the internal data can be passed
// directly to sq's colorizing json encoder (it can't handle colorization
// of values that implement json.Marshaler).
//
// There are two long-term solutions here:
//  1. The color encoder needs to be able to handle json.RawMessage.
//  2. Refactor source.Set so that it doesn't have this weird internal
//     representation.
func (s *Set) Data() any {
	if s == nil {
		return nil
	}

	// Lock for consistency with Set's other accessors (MarshalJSON,
	// Items, Clone all lock s.mu). The returned value is a copy of the
	// setData struct, though Items still shares its backing slice.
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.data
}
// MarshalJSON implements json.Marshaler.
func (s *Set) MarshalJSON() ([]byte, error) {
s.mu.Lock()
@ -64,9 +83,6 @@ func (s *Set) UnmarshalYAML(unmarshal func(any) error) error {
// Items returns the sources as a slice.
//
// NOTE(review): the returned slice is the Set's internal backing
// slice, not a copy — mutation by the caller would be visible inside
// the Set. Confirm that callers treat it as read-only.
func (s *Set) Items() []*Source {
	s.mu.Lock()
	defer s.mu.Unlock()
	return s.data.Items
}
@ -270,6 +286,31 @@ func (s *Set) Handles() []string {
return handles
}
// Clone returns a deep copy of s. If s is nil, nil is returned.
func (s *Set) Clone() *Set {
	if s == nil {
		return nil
	}

	s.mu.Lock()
	defer s.mu.Unlock()

	// Deep-copy each source; the clone gets its own items slice
	// and its own (zero-value) mutex.
	items := make([]*Source, len(s.data.Items))
	for i, src := range s.data.Items {
		items[i] = src.Clone()
	}

	return &Set{
		data: setData{
			ActiveSrc:  s.data.ActiveSrc,
			ScratchSrc: s.data.ScratchSrc,
			Items:      items,
		},
	}
}
// VerifySetIntegrity verifies the internal state of s.
// Typically this func is invoked after s has been loaded
// from config, verifying that the config is not corrupt.

View File

@ -70,33 +70,48 @@ func (s *Source) RedactedLocation() string {
return RedactLocation(s.Location)
}
// Clone returns a deep copy of s. If s is nil, nil is returned.
func (s *Source) Clone() *Source {
	if s == nil {
		return nil
	}

	// Handle, Type and Location are plain values; Options gets its
	// own deep copy via Options.Clone.
	clone := Source{
		Handle:   s.Handle,
		Type:     s.Type,
		Location: s.Location,
		Options:  s.Options.Clone(),
	}
	return &clone
}
// RedactLocation returns a redacted version of the source
// location loc, with the password component (if any) of
// the location masked.
func RedactLocation(loc string) string {
	switch {
	case loc == "",
		strings.HasPrefix(loc, "/"),
		strings.HasPrefix(loc, "sqlite3:///"):
		// Plain file paths (and sqlite3 file URLs) carry no credentials.
		return loc
	case strings.HasPrefix(loc, "http://"), strings.HasPrefix(loc, "https://"):
		u, err := url.ParseRequestURI(loc)
		if err != nil {
			// If we can't parse it, just return the original loc
			return loc
		}
		// Redacted masks any userinfo password as "xxxxx".
		return u.Redacted()
	}

	// At this point, we expect it's a DSN
	dbu, err := dburl.Parse(loc)
	if err != nil {
		// Shouldn't happen, but if it does, simply return the
		// unmodified loc.
		return loc
	}
	return dbu.Redacted()
}
// ShortLocation returns a short location string. For example, the

View File

@ -1,44 +1,62 @@
package source_test
import (
"strings"
"testing"
"github.com/neilotoole/sq/libsq/core/stringz"
"github.com/neilotoole/sq/testh/tutil"
"github.com/stretchr/testify/require"
"github.com/neilotoole/sq/libsq/source"
)
// TestWubble logs a random uppercase unique string.
// NOTE(review): this looks like leftover scratch/debug code — it makes
// no assertions and tests nothing; consider removing it.
func TestWubble(t *testing.T) {
	t.Log(strings.ToUpper(stringz.Uniq32()))
}
func TestRedactedLocation(t *testing.T) {
testCases := []struct {
tname string
loc string
want string
loc string
want string
}{
{tname: "sqlite", loc: "/path/to/sqlite.db", want: "/path/to/sqlite.db"},
{tname: "xlsx", loc: "/path/to/data.xlsx", want: "/path/to/data.xlsx"},
{tname: "https", loc: "https://path/to/data.xlsx", want: "https://path/to/data.xlsx"},
{tname: "http", loc: "http://path/to/data.xlsx", want: "http://path/to/data.xlsx"},
{
tname: "sqlserver", loc: "sqlserver://sq:p_ssW0rd@localhost?database=sqtest",
want: "sqlserver://sq:****@localhost?database=sqtest",
loc: "/path/to/sqlite.db",
want: "/path/to/sqlite.db",
},
{
tname: "postgres", loc: "postgres://sq:p_ssW0rd@localhost/sqtest?sslmode=disable",
want: "postgres://sq:****@localhost/sqtest?sslmode=disable",
loc: "/path/to/data.xlsx",
want: "/path/to/data.xlsx",
},
{
tname: "mysql", loc: "mysql://sq:p_ssW0rd@localhost:3306/sqtest",
want: "mysql://sq:****@localhost:3306/sqtest",
loc: "https://path/to/data.xlsx",
want: "https://path/to/data.xlsx",
},
{
tname: "sqlite3", loc: "sqlite3:///path/to/sqlite.db",
want: "sqlite3:/path/to/sqlite.db",
loc: "http://path/to/data.xlsx",
want: "http://path/to/data.xlsx",
},
{
loc: "sqlserver://sq:p_ssW0rd@localhost?database=sqtest",
want: "sqlserver://sq:xxxxx@localhost?database=sqtest",
},
{
loc: "postgres://sq:p_ssW0rd@localhost/sqtest?sslmode=disable",
want: "postgres://sq:xxxxx@localhost/sqtest?sslmode=disable",
},
{
loc: "mysql://sq:p_ssW0rd@localhost:3306/sqtest",
want: "mysql://sq:xxxxx@localhost:3306/sqtest",
},
{
loc: "sqlite3:///path/to/sqlite.db",
want: "sqlite3:///path/to/sqlite.db",
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.tname, func(t *testing.T) {
t.Run(tutil.Name(tc.loc), func(t *testing.T) {
src := &source.Source{Location: tc.loc}
got := src.RedactedLocation()
t.Logf("%s --> %s", src.Location, got)

View File

@ -216,6 +216,8 @@ func Name(args ...any) string {
}
s = strings.ReplaceAll(s, "/", "_")
s = strings.ReplaceAll(s, ":", "_")
s = strings.ReplaceAll(s, `\`, "_")
s = stringz.TrimLen(s, 40) // we don't want it to be too long
parts = append(parts, s)
}