package cli_test

import (
	"context"
	"encoding/csv"
	"fmt"
	"os"
	"strconv"
	"strings"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"

	"github.com/neilotoole/sq/cli"
	"github.com/neilotoole/sq/cli/testrun"
	"github.com/neilotoole/sq/libsq/core/stringz"
	"github.com/neilotoole/sq/libsq/core/tablefq"
	"github.com/neilotoole/sq/libsq/source"
	"github.com/neilotoole/sq/testh"
	"github.com/neilotoole/sq/testh/proj"
	"github.com/neilotoole/sq/testh/sakila"
)

// TestCmdSLQ_Insert_Create tests "sq QUERY --insert=@dest.tbl", where the
// destination table does not yet exist and is created by the insert.
func TestCmdSLQ_Insert_Create(t *testing.T) {
	th := testh.New(t)
	originSrc, destSrc := th.Source(sakila.SL3), th.Source(sakila.SL3)
	srcTbl := sakila.TblActor
	if th.IsMonotable(originSrc) {
		srcTbl = source.MonotableName
	}

	destTbl := stringz.UniqSuffix(sakila.TblActor + "_copy")

	tr := testrun.New(th.Context, t, nil).Add(*originSrc)
	if destSrc.Handle != originSrc.Handle {
		tr.Add(*destSrc)
	}

	insertTo := fmt.Sprintf("%s.%s", destSrc.Handle, destTbl)
	cols := stringz.PrefixSlice(sakila.TblActorCols(), ".")
	query := fmt.Sprintf("%s.%s | %s", originSrc.Handle, srcTbl, strings.Join(cols, ", "))

	err := tr.Exec("slq", "--insert="+insertTo, query)
	require.NoError(t, err)

	sink, err := th.QuerySQL(destSrc, nil, "select * from "+destTbl)
	require.NoError(t, err)
	require.Equal(t, sakila.TblActorCount, len(sink.Recs))
}

// TestCmdSLQ_Insert tests "sq slq QUERY --insert=@dest.tbl" for every
// combination of origin and destination SQL source types.
func TestCmdSLQ_Insert(t *testing.T) {
	for _, origin := range sakila.SQLLatest() {
		origin := origin
		t.Run("origin_"+origin, func(t *testing.T) {
			for _, dest := range sakila.SQLLatest() {
				dest := dest
				t.Run("dest_"+dest, func(t *testing.T) {
					t.Parallel()

					th := testh.New(t)
					originSrc, destSrc := th.Source(origin), th.Source(dest)
					srcTbl := sakila.TblActor
					if th.IsMonotable(originSrc) {
						srcTbl = source.MonotableName
					}

					// To avoid dirtying the destination table, we make a copy
					// of it (without data).
					tblName := th.CopyTable(
						true,
						destSrc,
						tablefq.From(sakila.TblActor),
						tablefq.T{},
						false,
					)

					tr := testrun.New(th.Context, t, nil).Add(*originSrc)
					if destSrc.Handle != originSrc.Handle {
						tr.Add(*destSrc)
					}

					insertTo := fmt.Sprintf("%s.%s", destSrc.Handle, tblName)
					cols := stringz.PrefixSlice(sakila.TblActorCols(), ".")
					query := fmt.Sprintf("%s.%s | %s", originSrc.Handle, srcTbl, strings.Join(cols, ", "))

					err := tr.Exec("slq", "--insert="+insertTo, query)
					require.NoError(t, err)

					sink, err := th.QuerySQL(destSrc, nil, "select * from "+tblName)
					require.NoError(t, err)
					require.Equal(t, sakila.TblActorCount, len(sink.Recs))
				})
			}
		})
	}
}

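// TestCmdSLQ_CSV verifies that a SLQ query against a CSV source returns the
// expected number of records in CSV output.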
func TestCmdSLQ_CSV(t *testing.T) {
	t.Parallel()

	th := testh.New(t)
	src := th.Source(sakila.CSVActor)
	tr := testrun.New(th.Context, t, nil).Add(*src)
	err := tr.Exec("slq", "--header=false", "--csv", fmt.Sprintf("%s.data", src.Handle))
	require.NoError(t, err)

	recs := tr.BindCSV()
	require.Equal(t, sakila.TblActorCount, len(recs))
}

// TestCmdSLQ_OutputFlag verifies that flag --output=<file> works.
func TestCmdSLQ_OutputFlag(t *testing.T) {
	t.Parallel()

	th := testh.New(t)
	src := th.Source(sakila.SL3)
	tr := testrun.New(th.Context, t, nil).Add(*src)
	outputFile, err := os.CreateTemp("", t.Name())
	require.NoError(t, err)

	t.Cleanup(func() {
		assert.NoError(t, outputFile.Close())
		assert.NoError(t, os.Remove(outputFile.Name()))
	})

	err = tr.Exec("slq",
		"--header=false", "--csv", fmt.Sprintf("%s.%s", src.Handle, sakila.TblActor),
		"--output", outputFile.Name())
	require.NoError(t, err)

	recs, err := csv.NewReader(outputFile).ReadAll()
	require.NoError(t, err)
	require.Equal(t, sakila.TblActorCount, len(recs))
}

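// TestCmdSLQ_Join_cross_source verifies that a join query can be executed
// across every combination of SQL test sources.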
func TestCmdSLQ_Join_cross_source(t *testing.T) {
	const queryTpl = `%s.customer | join(%s.address, .address_id) | where(.customer_id == %d) | .[0] | .customer_id, .email, .city_id` //nolint:lll
	handles := sakila.SQLLatest()

	// Attempt to join every SQL test source against every SQL test source.
	for _, h1 := range handles {
		h1 := h1
		t.Run("origin_"+h1, func(t *testing.T) {
			for _, h2 := range handles {
				h2 := h2
				t.Run("dest_"+h2, func(t *testing.T) {
					t.Parallel()

					th := testh.New(t)
					src1, src2 := th.Source(h1), th.Source(h2)

					tr := testrun.New(th.Context, t, nil).Add(*src1)
					if src2.Handle != src1.Handle {
						tr.Add(*src2)
					}

					query := fmt.Sprintf(queryTpl, src1.Handle, src2.Handle, sakila.MillerCustID)

					err := tr.Exec("slq", "--header=false", "--csv", query)
					require.NoError(t, err)

					recs := tr.BindCSV()
					require.Equal(t, 1, len(recs), "should only be one matching record")
					require.Equal(t, 3, len(recs[0]), "should have three fields")
					require.Equal(t, strconv.Itoa(sakila.MillerCustID), recs[0][0])
					require.Equal(t, sakila.MillerEmail, recs[0][1])
					require.Equal(t, strconv.Itoa(sakila.MillerCityID), recs[0][2])
				})
			}
		})
	}
}

// TestCmdSLQ_ActiveSrcHandle verifies that source.ActiveHandle is
// interpreted as the active src in a SLQ query.
func TestCmdSLQ_ActiveSrcHandle(t *testing.T) {
	th := testh.New(t)
	src := th.Source(sakila.SL3)

	// 1. Verify that the query works as expected using the actual src handle
	tr := testrun.New(th.Context, t, nil).Add(*src).Hush()

	require.Equal(t, src.Handle, tr.Run.Config.Collection.Active().Handle)
	err := tr.Exec("slq", "--header=false", "--csv", "@sakila_sl3.actor")
	require.NoError(t, err)
	recs := tr.BindCSV()
	require.Equal(t, sakila.TblActorCount, len(recs))

	// 2. Verify that it works using source.ActiveHandle as the src handle
	tr = testrun.New(th.Context, t, nil).Add(*src).Hush()
	require.Equal(t, src.Handle, tr.Run.Config.Collection.Active().Handle)
	err = tr.Exec("slq", "--header=false", "--csv", source.ActiveHandle+".actor")
	require.NoError(t, err)
	recs = tr.BindCSV()
	require.Equal(t, sakila.TblActorCount, len(recs))
}

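// TestCmdSLQ_PreprocessFlagArgVars tests cli.PreprocessFlagArgVars, which
// combines "--arg NAME VALUE" args into the "--arg NAME:VALUE" form.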
func TestCmdSLQ_PreprocessFlagArgVars(t *testing.T) {
	testCases := []struct {
		name    string
		in      []string
		want    []string
		wantErr bool
	}{
		{
			name: "empty",
			in:   []string{},
			want: []string{},
		},
		{
			name: "no flags",
			in:   []string{".actor"},
			want: []string{".actor"},
		},
		{
			name: "non-arg flag",
			in:   []string{"--json", ".actor"},
			want: []string{"--json", ".actor"},
		},
		{
			name: "non-arg flag with value",
			in:   []string{"--json", "true", ".actor"},
			want: []string{"--json", "true", ".actor"},
		},
		{
			name: "single arg flag",
			in:   []string{"--arg", "name", "TOM", ".actor"},
			want: []string{"--arg", "name:TOM", ".actor"},
		},
		{
			name:    "invalid arg name",
			in:      []string{"--arg", "na me", "TOM", ".actor"},
			wantErr: true,
		},
		{
			name:    "invalid arg name (with colon)",
			in:      []string{"--arg", "na:me", "TOM", ".actor"},
			wantErr: true,
		},
		{
			name: "colon in value",
			in:   []string{"--arg", "name", "T:OM", ".actor"},
			want: []string{"--arg", "name:T:OM", ".actor"},
		},
		{
			name: "single arg flag with whitespace",
			in:   []string{"--arg", "name", "TOM DOWD", ".actor"},
			want: []string{"--arg", "name:TOM DOWD", ".actor"},
		},
		{
			name: "two arg flags",
			in:   []string{"--arg", "name", "TOM", "--arg", "eyes", "blue", ".actor"},
			want: []string{"--arg", "name:TOM", "--arg", "eyes:blue", ".actor"},
		},
		{
			name: "two arg flags with interspersed flag",
			in:   []string{"--arg", "name", "TOM", "--json", "true", "--arg", "eyes", "blue", ".actor"},
			want: []string{"--arg", "name:TOM", "--json", "true", "--arg", "eyes:blue", ".actor"},
		},
	}

	for _, tc := range testCases {
		tc := tc
		t.Run(tc.name, func(t *testing.T) {
			got, gotErr := cli.PreprocessFlagArgVars(tc.in)
			if tc.wantErr {
				t.Log(gotErr.Error())
				require.Error(t, gotErr)
				return
			}

			require.NoError(t, gotErr)
			require.EqualValues(t, tc.want, got)
		})
	}
}

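// TestCmdSLQ_FlagActiveSource verifies that the --src flag switches the
// source for a single query without changing the persisted active source.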
func TestCmdSLQ_FlagActiveSource(t *testing.T) {
	t.Parallel()

	ctx := context.Background()
	tr := testrun.New(ctx, t, nil)

	// @sqlite will be the active source
	require.NoError(t, tr.Exec("add", proj.Abs(sakila.PathSL3), "--handle", "@sqlite"))

	tr = testrun.New(ctx, t, tr)
	require.NoError(t, tr.Exec("add", proj.Abs(sakila.PathCSVActor), "--handle", "@csv"))

	tr = testrun.New(ctx, t, tr)
	require.NoError(t, tr.Exec(
		"--csv",
		"--no-header",
		".actor",
	))
	require.Len(t, tr.BindCSV(), sakila.TblActorCount)

	// Now, use flag.ActiveSrc to switch the source.
	tr = testrun.New(ctx, t, tr)
	require.NoError(t, tr.Exec(
		"--csv",
		"--no-header",
		"--src", "@csv",
		".data",
	))
	require.Len(t, tr.BindCSV(), sakila.TblActorCount)

	// Double check that we didn't change the persisted active source
	tr = testrun.New(ctx, t, tr)
	require.NoError(t, tr.Exec("src", "--json"))
	require.Equal(t, "@sqlite", tr.BindMap()["handle"])
}

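// TestCmdSLQ_FlagActiveSchema verifies that the --src.schema flag sets the
// schema (and optionally the catalog) used by a query.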
func TestCmdSLQ_FlagActiveSchema(t *testing.T) {
	testCases := []struct {
		handle string

		// skipReason is the reason to skip the test case.
		skipReason string

		// defaultCatalog is the default catalog for the source.
		defaultCatalog string

		// defaultSchema is the default schema for the source,
		// e.g. "public" for Pg, or "dbo" for SQL Server.
		defaultSchema string

		// altCatalog is the name of a second catalog that
		// we know to exist in the source. For example, "model"
		// for SQL Server, or "postgres" for Postgres.
		altCatalog string

		// expectSchemaFuncValue is the value we expect
		// the SLQ "schema()" func to return. Generally one would
		// expect this to be the same as the value supplied
		// to --src.schema, but for SQL Server, the SLQ "schema()"
		// func does not honor --src.schema. This is a limitation in
		// SQL Server itself; it's not possible to change the default
		// schema for a connection.
		expectSchemaFuncValue string
	}{
		{
			handle:                sakila.Pg,
			defaultCatalog:        "sakila",
			defaultSchema:         "public",
			altCatalog:            "postgres",
			expectSchemaFuncValue: "information_schema",
		},
		{
			handle:                sakila.MS,
			defaultCatalog:        "sakila",
			defaultSchema:         "dbo",
			altCatalog:            "model",
			expectSchemaFuncValue: "dbo",
		},
		{
			handle:                sakila.My,
			defaultCatalog:        "def",
			defaultSchema:         "sakila",
			altCatalog:            "model",
			expectSchemaFuncValue: "information_schema",
		},
		{
			handle: sakila.SL3,
			skipReason: `SQLite 'schema' support requires implementing 'ATTACH DATABASE'.
See: https://github.com/neilotoole/sq/issues/324`,
		},
	}

	for _, tc := range testCases {
		tc := tc
		t.Run(tc.handle, func(t *testing.T) {
			if tc.skipReason != "" {
				t.Skip(tc.skipReason)
				return
			}
			th := testh.New(t)
			src := th.Source(tc.handle)

			tr := testrun.New(th.Context, t, nil).
				Add(*th.Source(sakila.CSVActor), *src)

			// Confirm that sakila.CSVActor is the active source.
			require.NoError(t, tr.Exec("src"))
			require.Equal(t, sakila.CSVActor, tr.OutString())

			// Test combination of --src and --src.schema
			const qInfoSchemaActor = `.tables | .table_catalog, .table_schema, .table_name, .table_type | where(.table_name == "actor")` //nolint:lll
			require.NoError(t, tr.Reset().Exec("--csv", "-H",
				"--src", tc.handle,
				"--src.schema", "information_schema",
				qInfoSchemaActor,
			))

			want := [][]string{{tc.defaultCatalog, tc.defaultSchema, "actor", "BASE TABLE"}}
			got := tr.BindCSV()
			require.Equal(t, want, got)

			require.NoError(t, tr.Reset().Exec("src", tc.handle))

			require.NoError(t, tr.Reset().Exec("-H", "schema()"))
			require.Equal(t, tc.defaultSchema, tr.OutString())

			// Test just --src.schema (schema part only)
			require.NoError(t, tr.Reset().Exec("--csv", "-H",
				"--src.schema", "information_schema",
				qInfoSchemaActor,
			))
			got = tr.BindCSV()
			require.Equal(t, want, got)

			if th.SQLDriverFor(src).Dialect().Catalog {
				// Test --src.schema (catalog and schema parts)
				require.NoError(t, tr.Reset().Exec("--csv", "-H",
					"--src.schema", tc.altCatalog+".information_schema",
					`.schemata | .catalog_name | unique`,
				))

				got = tr.BindCSV()
				require.Equal(t, tc.altCatalog, got[0][0])
			}

			// Note that for SQL Server, the SLQ "schema()"
			// func does not honor --src.schema. This is a limitation in
			// SQL Server itself; it's not possible to change the default
			// schema for a connection.
			require.NoError(t, tr.Reset().Exec("-H",
				"--src.schema", "information_schema",
				"schema()"))
			require.Equal(t, tc.expectSchemaFuncValue, tr.OutString())
		})
	}
}