Mirror of https://github.com/hasura/graphql-engine.git, synced 2024-12-15 01:12:56 +03:00

commit cc3539415b (parent b875d95a75)
cli: fix seeds incorrectly being applied to databases
GitOrigin-RevId: cce2612ddfd90d1fd10a0a3561d24c64569a6935
@@ -11,6 +11,7 @@
 - server: update pg_dump clean output to disable function body validation in create function statements to avoid errors due to forward references
 - server: fix a bug preventing some MSSQL foreign key relationships from being tracked
 - console: add a comment field for actions (#231)
+- cli: fix seeds incorrectly being applied to databases in config v3 (#6683)
 
 ## v2.0.0-alpha.6
@@ -23,6 +23,8 @@ import (
     "strings"
     "time"
 
+    "github.com/hasura/graphql-engine/cli/internal/hasura/pgdump"
+
     "github.com/hasura/graphql-engine/cli/migrate/database/hasuradb"
 
     "github.com/hasura/graphql-engine/cli/internal/hasura/v1metadata"
@@ -249,6 +251,12 @@ func (s *ServerConfig) GetV2QueryEndpoint() string {
     return nurl.String()
 }
 
+func (s *ServerConfig) GetPGDumpEndpoint() string {
+    nurl := *s.ParsedEndpoint
+    nurl.Path = path.Join(nurl.Path, s.APIPaths.PGDump)
+    return nurl.String()
+}
+
 // GetQueryEndpoint provides the url to contact the query API
 func (s *ServerConfig) GetV1MetadataEndpoint() string {
     nurl := *s.ParsedEndpoint
@@ -681,6 +689,7 @@ func (ec *ExecutionContext) Validate() error {
         V1Metadata: v1metadata.New(httpClient, ec.Config.GetV1MetadataEndpoint()),
         V1Query:    v1query.New(httpClient, ec.Config.GetV1QueryEndpoint()),
         V2Query:    v2query.New(httpClient, ec.Config.GetV2QueryEndpoint()),
+        PGDump:     pgdump.New(httpClient, ec.Config.GetPGDumpEndpoint()),
     }
     var state *util.ServerState
     if ec.HasMetadataV3 {
@@ -3,6 +3,8 @@ package commands
 import (
     "fmt"
 
+    "github.com/hasura/graphql-engine/cli/seed"
+
     "github.com/hasura/graphql-engine/cli"
     "github.com/hasura/graphql-engine/cli/internal/hasura"
     "github.com/hasura/graphql-engine/cli/internal/metadatautil"
@@ -76,3 +78,12 @@ func NewSeedCmd(ec *cli.ExecutionContext) *cobra.Command {
 
     return seedCmd
 }
+
+func getSeedDriver(configVersion cli.ConfigVersion) (driver *seed.Driver) {
+    if configVersion >= cli.V3 {
+        driver = seed.NewDriver(ec.APIClient.V2Query.Bulk, ec.APIClient.PGDump)
+    } else {
+        driver = seed.NewDriver(ec.APIClient.V1Query.Bulk, ec.APIClient.PGDump)
+    }
+    return driver
+}
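The selection above is the core of the command-side fix: config v3 projects must send seed SQL through the v2 query API, which understands per-source routing, while older projects keep the v1 query API. A self-contained sketch of the rule (the ConfigVersion values are illustrative assumptions standing in for cli.V2 and cli.V3):

    package main

    import "fmt"

    // ConfigVersion stands in for the CLI's config version type; the
    // concrete values here are assumptions for illustration only.
    type ConfigVersion int

    const (
        V2 ConfigVersion = 2
        V3 ConfigVersion = 3
    )

    // queryEndpointFor encodes the routing rule getSeedDriver applies:
    // config v3 projects send bulk queries to v2/query, older ones to v1/query.
    func queryEndpointFor(v ConfigVersion) string {
        if v >= V3 {
            return "v2/query"
        }
        return "v1/query"
    }

    func main() {
        fmt.Println(queryEndpointFor(V2)) // v1/query
        fmt.Println(queryEndpointFor(V3)) // v2/query
    }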
@@ -1,8 +1,6 @@
 package commands
 
 import (
-    "github.com/hasura/graphql-engine/cli/internal/hasura"
-    "github.com/hasura/graphql-engine/cli/migrate"
     "github.com/spf13/afero"
     "github.com/spf13/cobra"
 
@@ -12,6 +10,7 @@ import (
 
 type SeedApplyOptions struct {
     EC *cli.ExecutionContext
+    Driver *seed.Driver
 
     // seed file to apply
     FileNames []string
@@ -35,6 +34,7 @@ func newSeedApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
             return ec.Validate()
         },
         RunE: func(cmd *cobra.Command, args []string) error {
+            opts.Driver = getSeedDriver(ec.Config.Version)
             opts.EC.Spin("Applying seeds...")
             opts.Source = ec.Source
             err := opts.Run()
@@ -51,10 +51,6 @@ func newSeedApplyCmd(ec *cli.ExecutionContext) *cobra.Command {
 }
 
 func (o *SeedApplyOptions) Run() error {
-    migrateDriver, err := migrate.NewMigrate(o.EC, true, "", hasura.SourceKindPG)
-    if err != nil {
-        return err
-    }
     fs := afero.NewOsFs()
-    return seed.ApplySeedsToDatabase(o.EC, fs, migrateDriver, o.FileNames, o.Source.Name)
+    return o.Driver.ApplySeedsToDatabase(fs, o.EC.SeedsDirectory, o.FileNames, o.EC.Source)
 }
@@ -2,10 +2,13 @@ package commands
 
 import (
     "bytes"
+    "fmt"
+    "io/ioutil"
     "os"
     "path/filepath"
 
-    "github.com/hasura/graphql-engine/cli/migrate"
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
 
     "github.com/pkg/errors"
     "github.com/spf13/afero"
     "github.com/spf13/cobra"
@@ -17,6 +20,7 @@ import (
 
 type SeedNewOptions struct {
     EC *cli.ExecutionContext
+    Driver *seed.Driver
 
     // filename for the new seed file
     SeedName string
@@ -51,6 +55,7 @@ func newSeedCreateCmd(ec *cli.ExecutionContext) *cobra.Command {
         RunE: func(cmd *cobra.Command, args []string) error {
             opts.SeedName = args[0]
             opts.Source = ec.Source
+            opts.Driver = getSeedDriver(ec.Config.Version)
             err := opts.Run()
             if err != nil {
                 return err
@@ -76,21 +81,23 @@ func (o *SeedNewOptions) Run() error {
         UserProvidedSeedName: o.SeedName,
         DirectoryPath:        filepath.Join(o.EC.SeedsDirectory, o.Source.Name),
     }
 
     // If we are initializing from a database table
     // create a hasura client and add table name opts
     if createSeedOpts.Data == nil {
         var body []byte
         if len(o.FromTableNames) > 0 {
-            migrateDriver, err := migrate.NewMigrate(ec, true, o.Source.Name, o.Source.Kind)
-            if err != nil {
-                return errors.Wrap(err, "cannot initialize migrate driver")
+            if o.Source.Kind != hasura.SourceKindPG && o.EC.Config.Version >= cli.V3 {
+                return fmt.Errorf("--from-table is supported only for postgres sources")
             }
             // Send the query
-            body, err = migrateDriver.ExportDataDump(o.FromTableNames, o.Source.Name, o.Source.Kind)
+            bodyReader, err := o.Driver.ExportDatadump(o.FromTableNames, o.Source.Name)
             if err != nil {
                 return errors.Wrap(err, "exporting seed data")
             }
+            body, err = ioutil.ReadAll(bodyReader)
+            if err != nil {
+                return err
+            }
         } else {
             const defaultText = ""
             var err error
@@ -125,6 +125,7 @@ func TestSeedsApplyCmd(t *testing.T, ec *cli.ExecutionContext) {
             "can apply all seeds",
             &commands.SeedApplyOptions{
                 EC: ec,
+                Driver: seed.NewDriver(ec.APIClient.V1Query.Bulk, ec.APIClient.PGDump),
             },
             false,
         },
@@ -133,6 +134,7 @@ func TestSeedsApplyCmd(t *testing.T, ec *cli.ExecutionContext) {
             &commands.SeedApplyOptions{
                 EC:        ec,
                 FileNames: []string{"1591867862409_test.sql"},
+                Driver:    seed.NewDriver(ec.APIClient.V1Query.Bulk, ec.APIClient.PGDump),
             },
             false,
         },
@@ -141,6 +143,7 @@ func TestSeedsApplyCmd(t *testing.T, ec *cli.ExecutionContext) {
             &commands.SeedApplyOptions{
                 EC:        ec,
                 FileNames: []string{"1591867862419_test2.sql"},
+                Driver:    seed.NewDriver(ec.APIClient.V1Query.Bulk, ec.APIClient.PGDump),
             },
             true,
         },
@@ -130,6 +130,7 @@ func TestSeedsApplyCmd(t *testing.T, ec *cli.ExecutionContext) {
             &commands.SeedApplyOptions{
                 EC:     ec,
                 Source: cli.Source{Name: "default"},
+                Driver: seed.NewDriver(ec.APIClient.V2Query.Bulk, ec.APIClient.PGDump),
             },
             false,
         },
@@ -138,6 +139,7 @@ func TestSeedsApplyCmd(t *testing.T, ec *cli.ExecutionContext) {
             &commands.SeedApplyOptions{
                 EC:        ec,
                 FileNames: []string{"1591867862409_test.sql"},
+                Driver:    seed.NewDriver(ec.APIClient.V2Query.Bulk, ec.APIClient.PGDump),
             },
             false,
         },
@@ -146,6 +148,7 @@ func TestSeedsApplyCmd(t *testing.T, ec *cli.ExecutionContext) {
             &commands.SeedApplyOptions{
                 EC:        ec,
                 FileNames: []string{"1591867862419_test2.sql"},
+                Driver:    seed.NewDriver(ec.APIClient.V2Query.Bulk, ec.APIClient.PGDump),
             },
             true,
         },
@@ -12,12 +12,14 @@ type Client struct {
     V1Metadata V1Metadata
     V1Query    V1Query
     V2Query    V2Query
+    PGDump     PGDump
 }
 
 type V1Query interface {
     CommonMetadataOperations
     PGSourceOps
     Send(requestBody interface{}) (httpcResponse *httpc.Response, body io.Reader, error error)
+    Bulk([]RequestBody) (io.Reader, error)
 }
 
 type V1Metadata interface {
@@ -43,6 +45,7 @@ type V2Query interface {
     PGSourceOps
     MSSQLSourceOps
     Send(requestBody interface{}) (httpcResponse *httpc.Response, body io.Reader, error error)
+    Bulk([]RequestBody) (io.Reader, error)
 }
 
 type RequestBody struct {
cli/internal/hasura/pgdump.go (new file, 15 lines)
@@ -0,0 +1,15 @@
+package hasura
+
+import (
+    "io"
+)
+
+type PGDumpRequest struct {
+    Opts        []string `json:"opts"`
+    CleanOutput bool     `json:"clean_output"`
+    SourceName  string   `json:"source,omitempty"`
+}
+
+type PGDump interface {
+    Send(request PGDumpRequest) (responseBody io.Reader, error error)
+}
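This request type maps one-to-one onto the JSON body the server's pg_dump endpoint accepts. A quick sketch of what a marshalled request looks like, with the struct copied from the new file above and values mirroring this commit's tests:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // PGDumpRequest is copied from the new cli/internal/hasura/pgdump.go.
    type PGDumpRequest struct {
        Opts        []string `json:"opts"`
        CleanOutput bool     `json:"clean_output"`
        SourceName  string   `json:"source,omitempty"`
    }

    func main() {
        // A request like the one ExportDatadump builds later in this commit.
        req := PGDumpRequest{
            Opts:        []string{"--schema-only", "--table", "test"},
            CleanOutput: true,
            SourceName:  "default",
        }
        b, _ := json.Marshal(req)
        fmt.Println(string(b))
        // {"opts":["--schema-only","--table","test"],"clean_output":true,"source":"default"}
    }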
cli/internal/hasura/pgdump/pgdump.go (new file, 46 lines)
@@ -0,0 +1,46 @@
+package pgdump
+
+import (
+    "bytes"
+    "context"
+    "fmt"
+    "io"
+    "net/http"
+
+    "github.com/hasura/graphql-engine/cli/internal/httpc"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+)
+
+type Client struct {
+    *httpc.Client
+    path string
+}
+
+func New(client *httpc.Client, path string) *Client {
+    return &Client{client, path}
+}
+
+func (c *Client) send(body interface{}, responseBodyWriter io.Writer) (*httpc.Response, error) {
+    req, err := c.NewRequest(http.MethodPost, c.path, body)
+    if err != nil {
+        return nil, err
+    }
+    resp, err := c.LockAndDo(context.Background(), req, responseBodyWriter)
+    if err != nil {
+        return nil, err
+    }
+    return resp, nil
+}
+
+func (c *Client) Send(request hasura.PGDumpRequest) (io.Reader, error) {
+    responseBody := new(bytes.Buffer)
+    response, err := c.send(request, responseBody)
+    if err != nil {
+        return nil, err
+    }
+    if response.StatusCode != http.StatusOK {
+        return nil, fmt.Errorf("pg_dump request: %d %s", response.StatusCode, responseBody.String())
+    }
+    return responseBody, nil
+}
cli/internal/hasura/pgdump/pgdump_test.go (new file, 111 lines)
@@ -0,0 +1,111 @@
+package pgdump
+
+import (
+    "io/ioutil"
+    "testing"
+
+    pg "github.com/hasura/graphql-engine/cli/internal/hasura/sourceops/postgres"
+
+    "github.com/hasura/graphql-engine/cli/internal/testutil"
+
+    "github.com/stretchr/testify/require"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+    "github.com/hasura/graphql-engine/cli/internal/httpc"
+)
+
+func TestClient_Send(t *testing.T) {
+    portHasuraV13, teardown13 := testutil.StartHasura(t, "v1.3.3")
+    defer teardown13()
+    portHasuraLatest, teardownLatest := testutil.StartHasura(t, testutil.HasuraVersion)
+    defer teardownLatest()
+    type fields struct {
+        Client *httpc.Client
+        path   string
+    }
+    type args struct {
+        request hasura.PGDumpRequest
+    }
+
+    pgclient := pg.New(testutil.NewHttpcClient(t, portHasuraV13, nil), "v1/query")
+    sqlInput := hasura.PGRunSQLInput{
+        SQL: `CREATE TABLE test (
+    section NUMERIC NOT NULL,
+    id1 NUMERIC NOT NULL,
+    id2 NUMERIC NOT NULL
+);`,
+    }
+    _, err := pgclient.PGRunSQL(sqlInput)
+    require.NoError(t, err)
+    pgclient = pg.New(testutil.NewHttpcClient(t, portHasuraLatest, nil), "v2/query")
+    _, err = pgclient.PGRunSQL(sqlInput)
+    require.NoError(t, err)
+
+    tests := []struct {
+        name    string
+        fields  fields
+        args    args
+        want    string
+        wantErr bool
+    }{
+        {
+            "can make a pg_dump v1.3.3",
+            fields{
+                Client: testutil.NewHttpcClient(t, portHasuraV13, nil),
+                path:   "/v1alpha1/pg_dump",
+            },
+            args{
+                request: hasura.PGDumpRequest{
+                    Opts:        []string{"--schema-only", "--table", "test"},
+                    CleanOutput: true,
+                },
+            },
+            `CREATE TABLE public.test (
+    section numeric NOT NULL,
+    id1 numeric NOT NULL,
+    id2 numeric NOT NULL
+);
+ALTER TABLE public.test OWNER TO postgres;
+`,
+            false,
+        },
+        {
+            "can make a pg_dump on latest",
+            fields{
+                Client: testutil.NewHttpcClient(t, portHasuraLatest, nil),
+                path:   "/v1alpha1/pg_dump",
+            },
+            args{
+                request: hasura.PGDumpRequest{
+                    Opts:        []string{"--schema-only", "--table", "test"},
+                    CleanOutput: true,
+                },
+            },
+            `CREATE TABLE public.test (
+    section numeric NOT NULL,
+    id1 numeric NOT NULL,
+    id2 numeric NOT NULL
+);
+ALTER TABLE public.test OWNER TO postgres;
+`,
+            false,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            c := &Client{
+                Client: tt.fields.Client,
+                path:   tt.fields.path,
+            }
+            got, err := c.Send(tt.args.request)
+            if tt.wantErr {
+                require.Error(t, err)
+            } else {
+                require.NoError(t, err)
+                gotb, err := ioutil.ReadAll(got)
+                require.NoError(t, err)
+                require.Equal(t, tt.want, string(gotb))
+            }
+        })
+    }
+}
@@ -3,9 +3,12 @@ package v1query
 import (
     "bytes"
     "context"
+    "fmt"
     "io"
     "net/http"
 
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+
     "github.com/hasura/graphql-engine/cli/internal/hasura/commonmetadata"
     "github.com/hasura/graphql-engine/cli/internal/hasura/sourceops/postgres"
     "github.com/hasura/graphql-engine/cli/internal/httpc"
@@ -42,3 +45,22 @@ func (c *Client) Send(body interface{}) (*httpc.Response, io.Reader, error) {
     }
     return resp, responseBody, nil
 }
+
+func (c *Client) Bulk(args []hasura.RequestBody) (io.Reader, error) {
+    body := hasura.RequestBody{
+        Type: "bulk",
+        Args: args,
+    }
+    req, err := c.NewRequest(http.MethodPost, c.path, body)
+    if err != nil {
+        return nil, err
+    }
+    responseBody := new(bytes.Buffer)
+    resp, err := c.LockAndDo(context.Background(), req, responseBody)
+    if err != nil {
+        return nil, err
+    } else if resp.StatusCode != http.StatusOK {
+        return nil, fmt.Errorf("bulk request failed: %v %v", resp.StatusCode, responseBody.String())
+    }
+    return responseBody, nil
+}
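Bulk wraps its arguments in the query API's bulk envelope and POSTs them in a single request, which is what lets a whole seed directory apply in one round trip. A sketch of the payload shape; the JSON tags here are an assumption (the diff shows RequestBody only as a context line), but {"type":"bulk","args":[...]} is the documented bulk format:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // RequestBody mirrors the shape the query APIs accept; the JSON tags
    // are an assumption, since the struct's fields are not shown in this diff.
    type RequestBody struct {
        Type    string      `json:"type"`
        Version uint        `json:"version,omitempty"`
        Args    interface{} `json:"args"`
    }

    func main() {
        // What Bulk() builds before POSTing to v1/query or v2/query.
        body := RequestBody{
            Type: "bulk",
            Args: []RequestBody{
                {Type: "run_sql", Args: map[string]string{"sql": "select 1"}},
                {Type: "run_sql", Args: map[string]string{"sql": "select 2"}},
            },
        }
        b, _ := json.MarshalIndent(body, "", "  ")
        fmt.Println(string(b))
    }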
@@ -6,6 +6,11 @@ import (
     "net/http"
     "testing"
 
+    "github.com/stretchr/testify/require"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura/sourceops/postgres"
     pg "github.com/hasura/graphql-engine/cli/internal/hasura/sourceops/postgres"
 
     "github.com/hasura/graphql-engine/cli/internal/hasura/commonmetadata"
@@ -83,3 +88,133 @@ func TestClient_Send(t *testing.T) {
         })
     }
 }
+
+func TestClient_Bulk(t *testing.T) {
+    port, teardown := testutil.StartHasura(t, "v1.3.3")
+    defer teardown()
+    type fields struct {
+        Client                  *httpc.Client
+        path                    string
+        SourceOps               *postgres.SourceOps
+        ClientCommonMetadataOps *commonmetadata.ClientCommonMetadataOps
+    }
+    type args struct {
+        args []hasura.RequestBody
+    }
+    tests := []struct {
+        name    string
+        fields  fields
+        args    args
+        want    string
+        wantErr bool
+    }{
+        {
+            "can send a bulk request",
+            fields{
+                Client: func() *httpc.Client {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return c
+                }(),
+                path: "v1/query",
+            },
+            args{
+                args: []hasura.RequestBody{
+                    {
+                        Type:    "run_sql",
+                        Version: 0,
+                        Args: hasura.PGRunSQLInput{
+                            SQL: "select 1",
+                        },
+                    },
+                    {
+                        Type:    "run_sql",
+                        Version: 0,
+                        Args: hasura.PGRunSQLInput{
+                            SQL: "select 1",
+                        },
+                    },
+                },
+            },
+            `[
+  {
+    "result_type": "TuplesOk",
+    "result": [
+      [
+        "?column?"
+      ],
+      [
+        "1"
+      ]
+    ]
+  },
+  {
+    "result_type": "TuplesOk",
+    "result": [
+      [
+        "?column?"
+      ],
+      [
+        "1"
+      ]
+    ]
+  }
+]`,
+            false,
+        },
+        {
+            "can throw error on a bad request",
+            fields{
+                Client: func() *httpc.Client {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return c
+                }(),
+                path: "v1/query",
+            },
+            args{
+                args: []hasura.RequestBody{
+                    {
+                        Type:    "run_sql",
+                        Version: 0,
+                        Args: hasura.PGRunSQLInput{
+                            SQL: "select something crazy!",
+                        },
+                    },
+                    {
+                        Type:    "run_sql",
+                        Version: 0,
+                        Args: hasura.PGRunSQLInput{
+                            SQL: "select 1",
+                        },
+                    },
+                },
+            },
+            ``,
+            true,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            c := &Client{
+                Client:                  tt.fields.Client,
+                path:                    tt.fields.path,
+                SourceOps:               tt.fields.SourceOps,
+                ClientCommonMetadataOps: tt.fields.ClientCommonMetadataOps,
+            }
+            got, err := c.Bulk(tt.args.args)
+            if tt.wantErr {
+                require.Error(t, err)
+            } else {
+                require.NoError(t, err)
+                gotb, err := ioutil.ReadAll(got)
+                require.NoError(t, err)
+                require.Equal(t, tt.want, string(gotb))
+            }
+        })
+    }
+}
@@ -6,6 +6,9 @@ import (
     "net/http"
     "testing"
 
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+    "github.com/stretchr/testify/require"
+
     pg "github.com/hasura/graphql-engine/cli/internal/hasura/sourceops/postgres"
 
     "github.com/hasura/graphql-engine/cli/internal/hasura/commonmetadata"
@@ -90,3 +93,111 @@ func TestClient_Send(t *testing.T) {
         })
     }
 }
+
+func TestClient_Bulk(t *testing.T) {
+    port, mssqlSourceName, teardown := testutil.StartHasuraWithMSSQLSource(t, testutil.HasuraVersion)
+    defer teardown()
+    type fields struct {
+        Client *httpc.Client
+        path   string
+    }
+    type args struct {
+        args []hasura.RequestBody
+    }
+    tests := []struct {
+        name    string
+        fields  fields
+        args    args
+        want    string
+        wantErr bool
+    }{
+        {
+            "can send a bulk request",
+            fields{
+                Client: func() *httpc.Client {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return c
+                }(),
+                path: "v2/query",
+            },
+            args{
+                args: []hasura.RequestBody{
+                    {
+                        Type: "mssql_run_sql",
+                        Args: hasura.PGRunSQLInput{
+                            SQL:    "select 1",
+                            Source: mssqlSourceName,
+                        },
+                    },
+                },
+            },
+            `[
+  {
+    "result_type": "TuplesOk",
+    "result": [
+      [
+        ""
+      ],
+      [
+        1
+      ]
+    ]
+  }
+]`,
+            false,
+        },
+        {
+            "can throw error on a bad request",
+            fields{
+                Client: func() *httpc.Client {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return c
+                }(),
+                path: "v1/query",
+            },
+            args{
+                args: []hasura.RequestBody{
+                    {
+                        Type:    "run_sql",
+                        Version: 0,
+                        Args: hasura.PGRunSQLInput{
+                            SQL: "select something crazy!",
+                        },
+                    },
+                    {
+                        Type:    "run_sql",
+                        Version: 0,
+                        Args: hasura.PGRunSQLInput{
+                            SQL: "select 1",
+                        },
+                    },
+                },
+            },
+            ``,
+            true,
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            c := &Client{
+                Client: tt.fields.Client,
+                path:   tt.fields.path,
+            }
+            got, err := c.Bulk(tt.args.args)
+            if tt.wantErr {
+                require.Error(t, err)
+            } else {
+                require.NoError(t, err)
+                gotb, err := ioutil.ReadAll(got)
+                require.NoError(t, err)
+                require.Equal(t, tt.want, string(gotb))
+            }
+        })
+    }
+}
@@ -3,6 +3,7 @@ package v2query
 import (
     "bytes"
     "context"
+    "fmt"
     "io"
     "net/http"
 
@@ -42,3 +43,21 @@ func (c *Client) Send(body interface{}) (*httpc.Response, io.Reader, error) {
     }
     return resp, responseBody, nil
 }
+func (c *Client) Bulk(args []hasura.RequestBody) (io.Reader, error) {
+    body := hasura.RequestBody{
+        Type: "bulk",
+        Args: args,
+    }
+    req, err := c.NewRequest(http.MethodPost, c.path, body)
+    if err != nil {
+        return nil, err
+    }
+    responseBody := new(bytes.Buffer)
+    resp, err := c.LockAndDo(context.Background(), req, responseBody)
+    if err != nil {
+        return nil, err
+    } else if resp.StatusCode != http.StatusOK {
+        return nil, fmt.Errorf("bulk request failed: %v %v", resp.StatusCode, responseBody.String())
+    }
+    return responseBody, nil
+}
@@ -108,6 +108,13 @@ func (c *Client) LockAndDo(ctx context.Context, req *http.Request, v interface{}) (*Response, error) {
     return c.Do(ctx, req, v)
 }
 
+func hasJSONContentType(headers http.Header) bool {
+    if headers.Get("Content-Type") == "application/json" {
+        return true
+    }
+    return false
+}
+
 func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Response, error) {
     resp, err := c.BareDo(ctx, req)
     if err != nil {
@@ -117,8 +124,10 @@ func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Response, error) {
     switch v := v.(type) {
     case nil:
     case io.Writer:
+        if hasJSONContentType(resp.Header) {
         // indent json response
-        respBodyBytes, err := ioutil.ReadAll(resp.Body)
+        var respBodyBytes []byte
+        respBodyBytes, err = ioutil.ReadAll(resp.Body)
         if err != nil {
             return resp, err
         }
@@ -129,6 +138,9 @@ func (c *Client) Do(ctx context.Context, req *http.Request, v interface{}) (*Response, error) {
         }
         // copy it to writer
         _, err = io.Copy(v, &buf)
+        } else {
+            _, err = io.Copy(v, resp.Body)
+        }
     default:
         decErr := json.NewDecoder(resp.Body).Decode(v)
         if decErr == io.EOF {
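This httpc change is what lets pg_dump responses survive the round trip: previously Do ran every io.Writer response through the JSON indenter, which would fail on the plain SQL that pg_dump returns. A standalone sketch of the new dispatch (writeBody is a hypothetical name for logic that lives inline in Do):

    package main

    import (
        "bytes"
        "encoding/json"
        "fmt"
        "io"
        "io/ioutil"
        "net/http"
        "strings"
    )

    // hasJSONContentType is the same check the commit adds to httpc.
    func hasJSONContentType(headers http.Header) bool {
        return headers.Get("Content-Type") == "application/json"
    }

    // writeBody sketches the new dispatch: JSON bodies are pretty-printed,
    // anything else (pg_dump's SQL, for instance) is copied through verbatim.
    func writeBody(w io.Writer, headers http.Header, body io.Reader) error {
        if hasJSONContentType(headers) {
            respBodyBytes, err := ioutil.ReadAll(body)
            if err != nil {
                return err
            }
            var buf bytes.Buffer
            if err := json.Indent(&buf, respBodyBytes, "", "  "); err != nil {
                return err
            }
            _, err = io.Copy(w, &buf)
            return err
        }
        _, err := io.Copy(w, body)
        return err
    }

    func main() {
        h := http.Header{}
        h.Set("Content-Type", "application/sql")
        var out bytes.Buffer
        if err := writeBody(&out, h, strings.NewReader("CREATE TABLE t ();")); err != nil {
            panic(err)
        }
        fmt.Println(out.String()) // the SQL comes through untouched
    }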
|
@ -117,8 +117,6 @@ type Driver interface {
|
|||||||
|
|
||||||
SchemaDriver
|
SchemaDriver
|
||||||
|
|
||||||
SeedDriver
|
|
||||||
|
|
||||||
SettingsDriver
|
SettingsDriver
|
||||||
|
|
||||||
Query(data interface{}) error
|
Query(data interface{}) error
|
||||||
|
@@ -1,55 +0,0 @@
-package hasuradb
-
-import (
-    "bytes"
-    "fmt"
-    "net/http"
-
-    "github.com/hasura/graphql-engine/cli/internal/hasura"
-
-    "github.com/pkg/errors"
-)
-
-func (h *HasuraDB) ApplySeed(m interface{}) error {
-    resp, body, err := h.genericQueryRequest(m)
-    if err != nil {
-        return err
-    }
-    if resp.StatusCode != http.StatusOK {
-        v, ok := body.(*bytes.Buffer)
-        if ok {
-            return errors.New(v.String())
-        }
-        return fmt.Errorf("applying %v failed with code %d", m, resp.StatusCode)
-    }
-    return nil
-}
-
-func (h *HasuraDB) ExportDataDump(fromTables []string, sourceName string, sourceKind hasura.SourceKind) ([]byte, error) {
-    switch sourceKind {
-    case hasura.SourceKindPG:
-        pgDumpOpts := []string{"--no-owner", "--no-acl", "--data-only", "--column-inserts"}
-        for _, table := range fromTables {
-            pgDumpOpts = append(pgDumpOpts, "--table", table)
-        }
-        query := SchemaDump{
-            Opts:        pgDumpOpts,
-            CleanOutput: true,
-            Database:    sourceName,
-        }
-
-        resp, body, err := h.sendSchemaDumpQuery(query)
-        if err != nil {
-            h.logger.Debug(err)
-            return nil, err
-        }
-        h.logger.Debug("exporting data: ", string(body))
-
-        if resp.StatusCode != http.StatusOK {
-            return nil, NewHasuraError(body, h.config.isCMD)
-        }
-
-        return body, nil
-    }
-    return nil, fmt.Errorf("not supported for source %s of type %v", sourceName, sourceKind)
-}
@@ -1,8 +0,0 @@
-package database
-
-import "github.com/hasura/graphql-engine/cli/internal/hasura"
-
-type SeedDriver interface {
-    ApplySeed(m interface{}) error
-    ExportDataDump(tableNames []string, sourceName string, sourceKind hasura.SourceKind) ([]byte, error)
-}
@@ -12,7 +12,6 @@ import (
     "fmt"
     "io"
     "os"
-    "strings"
     "sync"
     "text/tabwriter"
     "time"
@@ -1803,32 +1802,6 @@ func (m *Migrate) readDownFromVersion(from int64, to int64, ret chan<- interface{}) {
     }
 }
 
-func (m *Migrate) ApplySeed(q interface{}) error {
-    return m.databaseDrv.ApplySeed(q)
-}
-
-func (m *Migrate) ExportDataDump(tableNames []string, sourceName string, sourceKind hasura.SourceKind) ([]byte, error) {
-    // to support tables starting with capital letters
-    modifiedTableNames := make([]string, len(tableNames))
-
-    for idx, val := range tableNames {
-        split := strings.Split(val, ".")
-        splitLen := len(split)
-
-        if splitLen != 1 && splitLen != 2 {
-            return nil, fmt.Errorf(`invalid schema/table provided "%s"`, val)
-        }
-
-        if splitLen == 2 {
-            modifiedTableNames[idx] = fmt.Sprintf(`"%s"."%s"`, split[0], split[1])
-        } else {
-            modifiedTableNames[idx] = fmt.Sprintf(`"%s"`, val)
-        }
-    }
-
-    return m.databaseDrv.ExportDataDump(modifiedTableNames, sourceName, sourceKind)
-}
-
 func printDryRunStatus(migrations []*Migration) *bytes.Buffer {
     out := new(tabwriter.Writer)
     buf := &bytes.Buffer{}
@@ -5,67 +5,105 @@ import (
     "os"
     "path/filepath"
 
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+
     "github.com/hasura/graphql-engine/cli"
 
-    "github.com/hasura/graphql-engine/cli/migrate"
-    "github.com/hasura/graphql-engine/cli/migrate/database/hasuradb"
     "github.com/pkg/errors"
     "github.com/spf13/afero"
 )
 
-// ApplySeedsToDatabase will read all .sql files in the given
-// directory and apply it to hasura
-func ApplySeedsToDatabase(ec *cli.ExecutionContext, fs afero.Fs, m *migrate.Migrate, filenames []string, database string) error {
-    seedQuery := hasuradb.HasuraInterfaceBulk{
-        Type: "bulk",
-        Args: make([]interface{}, 0),
-    }
+func hasAllowedSeedFileExtensions(filename string) error {
+    extension := filepath.Ext(filename)
+    allowedExtensions := []string{".sql", ".SQL"}
+    for _, allowedExtension := range allowedExtensions {
+        if allowedExtension == extension {
+            return nil
+        }
+    }
+    return fmt.Errorf("expected extension to be one of %v but got %s on file %s", allowedExtensions, extension, filename)
+}
+
+// ApplySeedsToDatabase will read all .sql files in the given
+// directory and apply it to hasura
+func (d *Driver) ApplySeedsToDatabase(fs afero.Fs, rootSeedsDirectory string, filenames []string, source cli.Source) error {
+    seedsDirectory := rootSeedsDirectory
+    if len(source.Name) > 0 {
+        seedsDirectory = filepath.Join(rootSeedsDirectory, source.Name)
+    }
+    getSourceKind := func(source cli.Source) hasura.SourceKind {
+        if len(source.Name) == 0 {
+            return hasura.SourceKindPG
+        }
+        return source.Kind
+    }
+    var sqlAsBytes [][]byte
     if len(filenames) > 0 {
         for _, filename := range filenames {
-            absFilename := filepath.Join(ec.SeedsDirectory, filename)
+            absFilename := filepath.Join(seedsDirectory, filename)
+            if err := hasAllowedSeedFileExtensions(absFilename); err != nil {
+                return err
+            }
             b, err := afero.ReadFile(fs, absFilename)
             if err != nil {
                 return errors.Wrap(err, "error opening file")
             }
-            q := hasuradb.HasuraInterfaceQuery{
-                Type: "run_sql",
-                Args: hasuradb.RunSQLInput{
-                    Source: database,
-                    SQL:    string(b),
-                },
-            }
-            seedQuery.Args = append(seedQuery.Args, q)
+            sqlAsBytes = append(sqlAsBytes, b)
         }
     } else {
-        err := afero.Walk(fs, ec.SeedsDirectory, func(path string, file os.FileInfo, err error) error {
+        err := afero.Walk(fs, seedsDirectory, func(path string, file os.FileInfo, err error) error {
             if file == nil || err != nil {
                 return err
             }
-            if !file.IsDir() && filepath.Ext(file.Name()) == ".sql" {
+            if err := hasAllowedSeedFileExtensions(file.Name()); err == nil && !file.IsDir() {
                 b, err := afero.ReadFile(fs, path)
                 if err != nil {
                     return errors.Wrap(err, "error opening file")
                 }
-                q := hasuradb.HasuraInterfaceQuery{
-                    Type: "run_sql",
-                    Args: hasuradb.RunSQLInput{
-                        SQL:    string(b),
-                        Source: database,
-                    },
-                }
-                seedQuery.Args = append(seedQuery.Args, q)
+                sqlAsBytes = append(sqlAsBytes, b)
             }
             return nil
         })
         if err != nil {
             return errors.Wrap(err, "error walking the directory path")
         }
     }
-    if len(seedQuery.Args) == 0 {
-        return fmt.Errorf("no SQL files found in %s", ec.SeedsDirectory)
-    }
-    return m.ApplySeed(seedQuery)
+    var args []hasura.RequestBody
+    sourceKind := getSourceKind(source)
+    switch sourceKind {
+    case hasura.SourceKindPG:
+        for _, sql := range sqlAsBytes {
+            request := hasura.RequestBody{
+                Type: "run_sql",
+                Args: hasura.PGRunSQLInput{
+                    SQL:    string(sql),
+                    Source: source.Name,
+                },
+            }
+            args = append(args, request)
+        }
+    case hasura.SourceKindMSSQL:
+        for _, sql := range sqlAsBytes {
+            request := hasura.RequestBody{
+                Type: "mssql_run_sql",
+                Args: hasura.MSSQLRunSQLInput{
+                    SQL:    string(sql),
+                    Source: source.Name,
+                },
+            }
+            args = append(args, request)
+        }
+    default:
+        return fmt.Errorf("database %s of kind %s is not supported", source.Name, source.Kind)
+    }
+
+    if len(args) == 0 {
+        return fmt.Errorf("no SQL files found in %s", seedsDirectory)
+    }
+    _, err := d.SendBulk(args)
+    if err != nil {
+        return err
+    }
+    return nil
 }
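This rewrite is the substance of the bug fix: seeds are now read from a per-source subdirectory and sent with a statement type matching the source's kind, instead of always being fired at a single database as run_sql. A small sketch of the two decisions (plain string kinds stand in for the hasura.SourceKind constants):

    package main

    import (
        "fmt"
        "path/filepath"
    )

    // seedsDirFor reproduces the new directory resolution: in config v3 every
    // source keeps its seeds under seeds/<source-name>; with no source name
    // (config v2 and earlier) the root seeds directory is used as before.
    func seedsDirFor(root, sourceName string) string {
        if len(sourceName) > 0 {
            return filepath.Join(root, sourceName)
        }
        return root
    }

    // runSQLTypeFor mirrors the new switch on source kind: Postgres seeds go
    // through run_sql, MSSQL seeds through mssql_run_sql.
    func runSQLTypeFor(kind string) (string, error) {
        switch kind {
        case "postgres":
            return "run_sql", nil
        case "mssql":
            return "mssql_run_sql", nil
        default:
            return "", fmt.Errorf("source kind %s is not supported", kind)
        }
    }

    func main() {
        fmt.Println(seedsDirFor("seeds", "default")) // seeds/default
        t, _ := runSQLTypeFor("mssql")
        fmt.Println(t) // mssql_run_sql
    }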
cli/seed/apply_test.go (new file, 150 lines)
@@ -0,0 +1,150 @@
+package seed
+
+import (
+    "fmt"
+    "testing"
+
+    "github.com/stretchr/testify/require"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura/pgdump"
+    "github.com/hasura/graphql-engine/cli/internal/testutil"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura/v1query"
+    "github.com/hasura/graphql-engine/cli/internal/httpc"
+
+    "github.com/hasura/graphql-engine/cli"
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+    "github.com/spf13/afero"
+)
+
+func TestDriver_ApplySeedsToDatabase(t *testing.T) {
+    port13, teardown := testutil.StartHasura(t, "v1.3.3")
+    defer teardown()
+    portLatest, teardown := testutil.StartHasura(t, testutil.HasuraVersion)
+    defer teardown()
+    type fields struct {
+        SendBulk     sendBulk
+        PGDumpClient hasura.PGDump
+    }
+    type args struct {
+        fs                 afero.Fs
+        rootSeedsDirectory string
+        filenames          []string
+        source             cli.Source
+    }
+    tests := []struct {
+        name    string
+        fields  fields
+        args    args
+        wantErr bool
+        // functions which should be run before the test
+        // possibly to prepare test fixtures maybe
+        before func(t *testing.T)
+    }{
+        {
+            "can apply seeds in v1.3.3",
+            fields{
+                func() sendBulk {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port13), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return v1query.New(c, "v1/query").Bulk
+                }(),
+                func() hasura.PGDump {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port13), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return pgdump.New(c, "v1alpha1/pg_dump")
+                }(),
+            },
+            args{
+                fs:                 afero.NewOsFs(),
+                rootSeedsDirectory: "testdata/seeds",
+                filenames:          []string{},
+            },
+            false,
+            nil,
+        },
+        {
+            "can apply seeds in latest",
+            fields{
+                func() sendBulk {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", portLatest), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return v1query.New(c, "v2/query").Bulk
+                }(),
+                func() hasura.PGDump {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", portLatest), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return pgdump.New(c, "v1alpha1/pg_dump")
+                }(),
+            },
+            args{
+                fs:                 afero.NewOsFs(),
+                rootSeedsDirectory: "testdata/seeds",
+                filenames:          []string{},
+            },
+            false,
+            nil,
+        },
+        {
+            "can apply seeds from files",
+            fields{
+                func() sendBulk {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", portLatest), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return v1query.New(c, "v2/query").Bulk
+                }(),
+                func() hasura.PGDump {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", portLatest), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return pgdump.New(c, "v1alpha1/pg_dump")
+                }(),
+            },
+            args{
+                fs:                 afero.NewOsFs(),
+                rootSeedsDirectory: "testdata/seeds",
+                filenames: []string{
+                    "articles.sql",
+                },
+            },
+            false,
+            func(t *testing.T) {
+                c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", portLatest), nil)
+                if err != nil {
+                    t.Fatal(err)
+                }
+                v1QueryClient := v1query.New(c, "v2/query")
+                _, err = v1QueryClient.PGRunSQL(hasura.PGRunSQLInput{
+                    SQL:    "DROP TABLE articles",
+                    Source: "default",
+                })
+                require.NoError(t, err)
+            },
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            d := &Driver{
+                SendBulk:     tt.fields.SendBulk,
+                PGDumpClient: tt.fields.PGDumpClient,
+            }
+            if tt.before != nil {
+                tt.before(t)
+            }
+            if err := d.ApplySeedsToDatabase(tt.args.fs, tt.args.rootSeedsDirectory, tt.args.filenames, tt.args.source); (err != nil) != tt.wantErr {
+                t.Errorf("ApplySeedsToDatabase() error = %v, wantErr %v", err, tt.wantErr)
+            }
+        })
+    }
+}
@@ -8,8 +8,11 @@ import (
     "os"
     "path/filepath"
     "strconv"
+    "strings"
     "time"
 
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+
     "github.com/spf13/afero"
 )
 
@@ -48,3 +51,38 @@ func CreateSeedFile(fs afero.Fs, opts CreateSeedOpts) (*string, error) {
 
     return &fullFilePath, nil
 }
+
+func (d *Driver) ExportDatadump(tableNames []string, sourceName string) (io.Reader, error) {
+    // to support tables starting with capital letters
+    modifiedTableNames := make([]string, len(tableNames))
+
+    for idx, val := range tableNames {
+        split := strings.Split(val, ".")
+        splitLen := len(split)
+
+        if splitLen != 1 && splitLen != 2 {
+            return nil, fmt.Errorf(`invalid schema/table provided "%s"`, val)
+        }
+
+        if splitLen == 2 {
+            modifiedTableNames[idx] = fmt.Sprintf(`"%s"."%s"`, split[0], split[1])
+        } else {
+            modifiedTableNames[idx] = fmt.Sprintf(`"%s"`, val)
+        }
+    }
+
+    pgDumpOpts := []string{"--no-owner", "--no-acl", "--data-only", "--column-inserts"}
+    for _, table := range modifiedTableNames {
+        pgDumpOpts = append(pgDumpOpts, "--table", table)
+    }
+    request := hasura.PGDumpRequest{
+        Opts:        pgDumpOpts,
+        CleanOutput: true,
+        SourceName:  sourceName,
+    }
+    response, err := d.PGDumpClient.Send(request)
+    if err != nil {
+        return nil, err
+    }
+    return response, nil
+}
cli/seed/create_test.go (new file, 110 lines)
@@ -0,0 +1,110 @@
+package seed
+
+import (
+    "fmt"
+    "io/ioutil"
+    "testing"
+
+    "github.com/stretchr/testify/require"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura/pgdump"
+    "github.com/hasura/graphql-engine/cli/internal/hasura/v1query"
+    "github.com/hasura/graphql-engine/cli/internal/httpc"
+
+    "github.com/hasura/graphql-engine/cli/internal/testutil"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+)
+
+func TestDriver_ExportDatadump(t *testing.T) {
+    port, teardown := testutil.StartHasura(t, testutil.HasuraVersion)
+    defer teardown()
+    type fields struct {
+        SendBulk     sendBulk
+        PGDumpClient hasura.PGDump
+    }
+    type args struct {
+        tableNames []string
+        sourceName string
+    }
+    tests := []struct {
+        name    string
+        fields  fields
+        args    args
+        want    string
+        wantErr bool
+        before  func(t *testing.T)
+    }{
+        {
+            "can export data dump",
+            fields{
+                func() sendBulk {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return v1query.New(c, "v2/query").Bulk
+                }(),
+                func() hasura.PGDump {
+                    c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port), nil)
+                    if err != nil {
+                        t.Fatal(err)
+                    }
+                    return pgdump.New(c, "v1alpha1/pg_dump")
+                }(),
+            },
+            args{
+                tableNames: []string{"articles", "authors"},
+                sourceName: "default",
+            },
+            `INSERT INTO public.articles (id, title, content, rating, author_id) VALUES (1, 'test1', 'test1', 1, 4);
+INSERT INTO public.articles (id, title, content, rating, author_id) VALUES (2, 'test2', 'test1', 1, 4);
+INSERT INTO public.articles (id, title, content, rating, author_id) VALUES (3, 'test3', 'test1', 1, 4);
+INSERT INTO public.authors (id, name) VALUES (1, 'test1');
+INSERT INTO public.authors (id, name) VALUES (4, 'test2');
+SELECT pg_catalog.setval('public.articles_author_id_seq', 1, false);
+SELECT pg_catalog.setval('public.articles_id_seq', 1, false);
+SELECT pg_catalog.setval('public.authors_id_seq', 1, false);
+`,
+            false,
+            func(t *testing.T) {
+                c, err := httpc.New(nil, fmt.Sprintf("http://localhost:%s/", port), nil)
+                if err != nil {
+                    t.Fatal(err)
+                }
+                q := v1query.New(c, "v2/query")
+                b, err := ioutil.ReadFile("testdata/seeds/articles.sql")
+                require.NoError(t, err)
+                _, err = q.PGRunSQL(hasura.PGRunSQLInput{
+                    SQL: string(b),
+                })
+                require.NoError(t, err)
+                b, err = ioutil.ReadFile("testdata/seeds/authors.sql")
+                require.NoError(t, err)
+                _, err = q.PGRunSQL(hasura.PGRunSQLInput{
+                    SQL: string(b),
+                })
+                require.NoError(t, err)
+            },
+        },
+    }
+    for _, tt := range tests {
+        t.Run(tt.name, func(t *testing.T) {
+            d := &Driver{
+                SendBulk:     tt.fields.SendBulk,
+                PGDumpClient: tt.fields.PGDumpClient,
+            }
+            if tt.before != nil {
+                tt.before(t)
+            }
+            got, err := d.ExportDatadump(tt.args.tableNames, tt.args.sourceName)
+            if tt.wantErr {
+                require.Error(t, err)
+            } else {
+                require.NoError(t, err)
+                gotb, err := ioutil.ReadAll(got)
+                require.NoError(t, err)
+                require.Equal(t, tt.want, string(gotb))
+            }
+        })
+    }
+}
cli/seed/seed.go (new file, 17 lines)
@@ -0,0 +1,17 @@
+package seed
+
+import (
+    "io"
+
+    "github.com/hasura/graphql-engine/cli/internal/hasura"
+)
+
+type sendBulk func([]hasura.RequestBody) (io.Reader, error)
+type Driver struct {
+    SendBulk     sendBulk
+    PGDumpClient hasura.PGDump
+}
+
+func NewDriver(s sendBulk, pgDumpClient hasura.PGDump) *Driver {
+    return &Driver{s, pgDumpClient}
+}
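The Driver is deliberately tiny: applying seeds now needs only a bulk-query function and a pg_dump client, with no dependency on the migrate machinery. A wiring sketch written as if inside the CLI module (the internal packages are not importable from outside it); the endpoint URL is a placeholder, and the paths match the ones this commit's tests use:

    package main

    import (
        "log"

        "github.com/spf13/afero"

        "github.com/hasura/graphql-engine/cli"
        "github.com/hasura/graphql-engine/cli/internal/hasura"
        "github.com/hasura/graphql-engine/cli/internal/hasura/pgdump"
        "github.com/hasura/graphql-engine/cli/internal/hasura/v2query"
        "github.com/hasura/graphql-engine/cli/internal/httpc"
        "github.com/hasura/graphql-engine/cli/seed"
    )

    func main() {
        // The endpoint URL is a placeholder for a running graphql-engine.
        c, err := httpc.New(nil, "http://localhost:8080/", nil)
        if err != nil {
            log.Fatal(err)
        }
        // Config v3 wiring: bulk queries via v2/query, dumps via pg_dump.
        driver := seed.NewDriver(v2query.New(c, "v2/query").Bulk, pgdump.New(c, "v1alpha1/pg_dump"))
        source := cli.Source{Name: "default", Kind: hasura.SourceKindPG}
        // nil filenames means: walk seeds/default and apply every .sql file.
        if err := driver.ApplySeedsToDatabase(afero.NewOsFs(), "seeds", nil, source); err != nil {
            log.Fatal(err)
        }
    }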
cli/seed/testdata/seeds/articles.sql (new file, 14 lines)
@@ -0,0 +1,14 @@
+CREATE TABLE articles
+(
+    id serial NOT NULL,
+    title text NOT NULL,
+    content text NOT NULL,
+    rating integer NOT NULL,
+    author_id serial NOT NULL,
+    PRIMARY KEY (id)
+);
+
+INSERT INTO articles (id, title, content, rating, author_id)
+VALUES (1, 'test1', 'test1', 1, 4),
+       (2, 'test2', 'test1', 1, 4),
+       (3, 'test3', 'test1', 1, 4);
cli/seed/testdata/seeds/authors.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
+CREATE TABLE authors
+(
+    id SERIAL PRIMARY KEY,
+    name TEXT
+);
+
+INSERT INTO authors(id, name)
+VALUES (1, 'test1'),
+       (4, 'test2');