cli: split remote schema permissions metadata into separate files

closes https://github.com/hasura/graphql-engine/issues/7033

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/1877
Co-authored-by: Aravind K P <8335904+scriptonist@users.noreply.github.com>
GitOrigin-RevId: 3764db9102d41f67a43f76aa79d28eb5485579ca
Kali Vara Purushotham Santhati 2021-10-14 12:22:03 +05:30 committed by hasura-bot
parent ef2e1ae935
commit 1e5a48286f
61 changed files with 735 additions and 45 deletions
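
For orientation, a sketch of the metadata layout this change produces, based on the Export test fixtures further down in this PR (the remote schema tt and the manager/user roles come from those fixtures). Each remote schema's permissions move out of the single remote_schemas.yaml into a per-schema permissions file, and each role's schema is written to its own .graphql file, all stitched back together with !include tags:

metadata/
  remote_schemas/
    remote_schemas.yaml          # permissions: "!include tt/permissions/permissions.yaml"
    tt/
      permissions/
        permissions.yaml         # schema: "!include schemas/manager.graphql"
        schemas/
          manager.graphql
          user.graphql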

View File

@ -2,8 +2,10 @@
## Next release
(Add entries below in the order of server, console, cli, docs, others)
- server: fix bug which recreated event triggers every time the graphql-engine started up
- console: design cleanup Modify and Add Table forms (close #7454)
- cli: split remote schema permissions metadata into separate files (#7033)
### Function field names customization (#7405)
It is now possible to specify the GraphQL names of tracked SQL functions in

View File

@ -2,6 +2,7 @@ package v2
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"path/filepath"
@ -123,11 +124,22 @@ func TestMetadataCmd(t *testing.T, ec *cli.ExecutionContext) {
for _, file := range files {
name := file.Name()
expectedByt, err := ioutil.ReadFile(filepath.Join(tc.expectedMetadataFolder, name))
expectedFileName := filepath.Join(tc.expectedMetadataFolder, name)
actualFileName := filepath.Join(ec.MetadataDir, name)
fs, err := os.Stat(expectedFileName)
if err != nil {
t.Fatalf("%s: unable to get info about the expected metadata file %s, got %v", tc.name, name, err)
}
if fs.IsDir() {
// remote_schemas -> remote_schemas/remote_schemas.yaml
expectedFileName = filepath.Join(expectedFileName, fmt.Sprintf("%s.yaml", name))
actualFileName = filepath.Join(actualFileName, fmt.Sprintf("%s.yaml", name))
}
expectedByt, err := ioutil.ReadFile(expectedFileName)
if err != nil {
t.Fatalf("%s: unable to read expected metadata file %s, got %v", tc.name, name, err)
}
actualByt, err := ioutil.ReadFile(filepath.Join(ec.MetadataDir, name))
actualByt, err := ioutil.ReadFile(actualFileName)
if err != nil {
t.Fatalf("%s: unable to read actual metadata file %s, got %v", tc.name, name, err)
}

View File

@ -1,16 +1,38 @@
package remoteschemas
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"github.com/hasura/graphql-engine/cli/v2/internal/metadataobject"
"github.com/goccy/go-yaml"
"github.com/hasura/graphql-engine/cli/v2"
"github.com/hasura/graphql-engine/cli/v2/internal/metadataobject"
"github.com/hasura/graphql-engine/cli/v2/internal/metadatautil"
"github.com/sirupsen/logrus"
"gopkg.in/yaml.v2"
goyaml "gopkg.in/yaml.v2"
v3yaml "gopkg.in/yaml.v3"
)
const (
remoteSchemasDirectory string = "remote_schemas"
remoteSchemasPermissionsDirectory string = "permissions"
remoteSchemaPermissionsFile string = "permissions.yaml"
remoteSchemaPermissionSchemasDirectory string = "schemas"
)
type RemoteSchema struct {
Name string `yaml:"name"`
Definition interface{} `yaml:"definition"`
Comment interface{} `yaml:"comment"`
Permission interface{} `yaml:"permissions"`
}
type SchemaDefinition struct {
Schema string `yaml:"schema"`
}
type RemoteSchemaConfig struct {
MetadataDir string
@ -34,52 +56,164 @@ func (r *RemoteSchemaConfig) CreateFiles() error {
if err != nil {
return err
}
err = ioutil.WriteFile(filepath.Join(r.MetadataDir, r.Filename()), data, 0644)
path := filepath.Join(r.MetadataDir, remoteSchemasDirectory, r.Filename())
if err := os.MkdirAll(filepath.Dir(path), 0744); err != nil {
return err
}
err = ioutil.WriteFile(path, data, 0644)
if err != nil {
return err
}
return nil
}
func (r *RemoteSchemaConfig) Build(metadata *yaml.MapSlice) metadataobject.ErrParsingMetadataObject {
data, err := ioutil.ReadFile(filepath.Join(r.MetadataDir, r.Filename()))
func (r *RemoteSchemaConfig) Build(metadata *goyaml.MapSlice) metadataobject.ErrParsingMetadataObject {
newRemoteSchemaFile := filepath.Join(r.MetadataDir, remoteSchemasDirectory, r.Filename())
if _, err := os.Stat(newRemoteSchemaFile); os.IsNotExist(err) {
// if metadata/remote_schemas/remote_schemas.yaml is not present,
// fall back to the old workflow where remote schemas used to be stored in metadata/remote_schemas.yaml;
// if that file exists, respect it and read the metadata from there
data, err := ioutil.ReadFile(filepath.Join(r.MetadataDir, r.Filename()))
if err != nil {
return r.error(err)
}
item := goyaml.MapItem{
Key: "remote_schemas",
}
var obj []goyaml.MapSlice
err = goyaml.Unmarshal(data, &obj)
if err != nil {
return r.error(err)
}
if len(obj) != 0 {
item.Value = obj
*metadata = append(*metadata, item)
}
return nil
}
data, err := ioutil.ReadFile(newRemoteSchemaFile)
if err != nil {
return r.error(err)
}
item := yaml.MapItem{
Key: "remote_schemas",
var remoteSchemas []*RemoteSchema
if err := v3yaml.Unmarshal(data, &remoteSchemas); err != nil {
return r.error(fmt.Errorf("parsing error: %w", err))
}
var obj []yaml.MapSlice
err = yaml.Unmarshal(data, &obj)
if err != nil {
return r.error(err)
if len(remoteSchemas) == 0 {
return nil
}
if len(obj) != 0 {
item.Value = obj
*metadata = append(*metadata, item)
for idx, remoteSchema := range remoteSchemas {
permissionPath := fmt.Sprintf("$[%d].permissions", idx)
permissionsPath, err := yaml.PathString(permissionPath)
if err != nil {
return r.error(fmt.Errorf("parsing error: %w", err))
}
PermissionNode, err := permissionsPath.ReadNode(bytes.NewReader(data))
if err == nil {
tableNodeBytes, err := ioutil.ReadAll(PermissionNode)
if err != nil {
return r.error(err)
}
var permissions interface{}
err = v3yaml.Unmarshal(tableNodeBytes, metadatautil.NewYamlDecoder(
metadatautil.YamlDecoderOpts{
IncludeTagBaseDirectory: filepath.Join(r.MetadataDir, remoteSchemasDirectory),
},
&permissions,
))
if err != nil {
return r.error(err)
}
remoteSchema.Permission = permissions
} else {
r.logger.Debugf("building metadata: permission node not found for %s", remoteSchema.Name)
}
}
item := goyaml.MapItem{
Key: "remote_schemas",
Value: []yaml.MapSlice{},
}
item.Value = remoteSchemas
*metadata = append(*metadata, item)
return nil
}
func (r *RemoteSchemaConfig) Export(metadata yaml.MapSlice) (map[string][]byte, metadataobject.ErrParsingMetadataObject) {
var remoteSchemas interface{}
for _, item := range metadata {
k, ok := item.Key.(string)
if !ok || k != "remote_schemas" {
continue
func (r *RemoteSchemaConfig) Export(metadata goyaml.MapSlice) (map[string][]byte, metadataobject.ErrParsingMetadataObject) {
oldRemoteSchemaFile := filepath.Join(r.MetadataDir, r.Filename())
if _, err := os.Stat(oldRemoteSchemaFile); !os.IsNotExist(err) {
err := os.Remove(oldRemoteSchemaFile)
if err != nil {
return nil, r.error(err, "error while removing old remote schema file in metadata directory")
}
remoteSchemas = item.Value
}
if remoteSchemas == nil {
remoteSchemas = make([]interface{}, 0)
metadataBytes, err := goyaml.Marshal(metadata)
if err != nil {
return nil, r.error(err)
}
files := map[string][]byte{}
var remoteSchemas []*RemoteSchema
remoteSchemaPath, err := yaml.PathString("$.remote_schemas")
if err != nil {
return nil, r.error(err)
}
if err := remoteSchemaPath.Read(bytes.NewReader(metadataBytes), &remoteSchemas); err != nil {
r.logger.Debug("reading remote schema from metadata", err)
}
for idx, remoteSchema := range remoteSchemas {
permissions := make([]struct {
Role string `yaml:"role"`
Definition SchemaDefinition `yaml:"definition"`
}, 0)
permissionPath := fmt.Sprintf("$.remote_schemas[%d].permissions", idx)
permissionsPath, err := yaml.PathString(permissionPath)
if err != nil {
return nil, r.error(err)
}
if err := permissionsPath.Read(bytes.NewReader(metadataBytes), &permissions); err != nil {
r.logger.Debug("reading remote schema permissions from metadata", err)
}
for idy, role := range permissions {
contents := role.Definition.Schema
roleFileName := fmt.Sprintf("%s.graphql", role.Role)
roleAbsFilePath := filepath.ToSlash(filepath.Join(r.MetadataDir, remoteSchemasDirectory, remoteSchema.Name, remoteSchemasPermissionsDirectory, remoteSchemaPermissionSchemasDirectory, roleFileName))
files[roleAbsFilePath] = []byte(contents)
role.Definition.Schema = fmt.Sprintf("%s %s", "!include", filepath.Join(remoteSchemaPermissionSchemasDirectory, roleFileName))
permissions[idy] = role
}
rsPermissionsFile := filepath.Join(remoteSchema.Name, remoteSchemasPermissionsDirectory, remoteSchemaPermissionsFile)
rsPermissionsFileAbsPath := filepath.ToSlash(filepath.Join(r.MetadataDir, remoteSchemasDirectory, rsPermissionsFile))
data, err := yaml.Marshal(permissions)
if err != nil {
return nil, r.error(err)
}
files[rsPermissionsFileAbsPath] = data
if len(permissions) != 0 {
rSPermissionIncludeTag := fmt.Sprintf("%s %s", "!include", rsPermissionsFile)
remoteSchema.Permission = rSPermissionIncludeTag
}
}
data, err := yaml.Marshal(remoteSchemas)
if err != nil {
return nil, r.error(err)
}
return map[string][]byte{
filepath.ToSlash(filepath.Join(r.MetadataDir, r.Filename())): data,
}, nil
remoteSchemaFile := filepath.Join(r.MetadataDir, remoteSchemasDirectory, r.Filename())
files[filepath.ToSlash(remoteSchemaFile)] = data
return files, nil
}
func (r *RemoteSchemaConfig) Key() string {

View File

@ -0,0 +1,152 @@
package remoteschemas
import (
"io/ioutil"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/stretchr/testify/assert"
"gopkg.in/yaml.v2"
"github.com/sirupsen/logrus"
)
func TestRemoteSchemaConfig_Export(t *testing.T) {
type fields struct {
MetadataDir string
logger *logrus.Logger
}
type args struct {
metadata yaml.MapSlice
}
tests := []struct {
name string
fields fields
args args
want map[string][]byte
wantErr bool
}{
{
"can create remote schemas metadata files",
fields{
MetadataDir: "./metadata",
logger: logrus.New(),
},
args{
metadata: func() yaml.MapSlice {
var metadata yaml.MapSlice
jsonb, err := ioutil.ReadFile("testdata/metadata.json")
assert.NoError(t, err)
assert.NoError(t, yaml.Unmarshal(jsonb, &metadata))
return metadata
}(),
},
map[string][]byte{
"metadata/remote_schemas/remote_schemas.yaml": []byte(
`- name: tt
definition:
timeout_seconds: 60
url: http://host.docker.internal:4000/graphql
comment: ""
permissions: "!include tt/permissions/permissions.yaml"
`),
"metadata/remote_schemas/tt/permissions/permissions.yaml": []byte(
`- role: manager
definition:
schema: "!include schemas/manager.graphql"
- role: user
definition:
schema: "!include schemas/user.graphql"
`),
"metadata/remote_schemas/tt/permissions/schemas/manager.graphql": []byte("schema { query: Query }\ntype Query { hello: String\n}"),
"metadata/remote_schemas/tt/permissions/schemas/user.graphql": []byte("schema { query: Query }\ntype Query { hello: String\n}"),
},
false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tc := &RemoteSchemaConfig{
MetadataDir: tt.fields.MetadataDir,
logger: tt.fields.logger,
}
got, err := tc.Export(tt.args.metadata)
if (err != nil) != tt.wantErr {
t.Fatalf("Export() error = %v, wantErr %v", err, tt.wantErr)
return
}
var wantContent = map[string]string{}
var gotContent = map[string]string{}
for k, v := range got {
gotContent[k] = string(v)
}
for k, v := range tt.want {
wantContent[k] = string(v)
}
if diff := cmp.Diff(wantContent, gotContent); diff != "" {
t.Errorf("Export() mismatch (-want +got):\n%s", diff)
}
})
}
}
func TestRemoteSchemaConfig_Build(t *testing.T) {
type fields struct {
MetadataDir string
logger *logrus.Logger
}
type args struct {
metadata *yaml.MapSlice
}
tests := []struct {
name string
fields fields
args args
want string
wantErr bool
}{
{
"can build remote schema metadata files from json",
fields{
MetadataDir: "testdata/metadata",
logger: logrus.New(),
},
args{
metadata: new(yaml.MapSlice),
},
`remote_schemas:
- name: tt
definition:
timeout_seconds: 60
url: http://host.docker.internal:4000/graphql
comment: ""
permissions:
- definition:
schema: |-
schema { query: Query }
type Query { hello: String }
role: manager
- definition:
schema: |-
schema { query: Query }
type Query { hello: String }
role: user
`,
false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tc := &RemoteSchemaConfig{
MetadataDir: tt.fields.MetadataDir,
logger: tt.fields.logger,
}
if err := tc.Build(tt.args.metadata); (err != nil) != tt.wantErr {
t.Fatalf("Build() error = %v, wantErr %v", err, tt.wantErr)
}
b, err := yaml.Marshal(tt.args.metadata)
assert.NoError(t, err)
assert.Equal(t, tt.want, string(b))
})
}
}

View File

@ -0,0 +1,27 @@
{
"version": 3,
"remote_schemas": [
{
"name": "tt",
"definition": {
"url": "http://host.docker.internal:4000/graphql",
"timeout_seconds": 60
},
"comment": "",
"permissions": [
{
"role": "manager",
"definition": {
"schema": "schema { query: Query }\ntype Query { hello: String\n}"
}
},
{
"role": "user",
"definition": {
"schema": "schema { query: Query }\ntype Query { hello: String\n}"
}
}
]
}
]
}

View File

@ -0,0 +1,6 @@
- name: tt
definition:
timeout_seconds: 60
url: http://host.docker.internal:4000/graphql
comment: ""
permissions: "!include tt/permissions/permissions.yaml"

View File

@ -0,0 +1,6 @@
- role: manager
definition:
schema: "!include schemas/manager.graphql"
- role: user
definition:
schema: "!include schemas/user.graphql"

View File

@ -0,0 +1,2 @@
schema { query: Query }
type Query { hello: String }

View File

@ -0,0 +1,2 @@
schema { query: Query }
type Query { hello: String }

View File

@ -8,6 +8,7 @@ import (
"path/filepath"
"github.com/hasura/graphql-engine/cli/v2/internal/metadataobject"
"github.com/hasura/graphql-engine/cli/v2/internal/metadatautil"
"github.com/sirupsen/logrus"
@ -98,8 +99,8 @@ func (t *SourceConfig) Build(metadata *goyaml.MapSlice) metadataobject.ErrParsin
return t.error(err)
}
var tablesKey interface{}
err = v3yaml.Unmarshal(tableNodeBytes, newSourcesYamlDecoder(
sourcesYamlDecoderOpts{
err = v3yaml.Unmarshal(tableNodeBytes, metadatautil.NewYamlDecoder(
metadatautil.YamlDecoderOpts{
IncludeTagBaseDirectory: filepath.Join(t.MetadataDir, sourcesDirectory),
},
&tablesKey,
@ -124,8 +125,8 @@ func (t *SourceConfig) Build(metadata *goyaml.MapSlice) metadataobject.ErrParsin
return t.error(err)
}
var functionsKey interface{}
err = v3yaml.Unmarshal(functionsNodeBytes, newSourcesYamlDecoder(
sourcesYamlDecoderOpts{
err = v3yaml.Unmarshal(functionsNodeBytes, metadatautil.NewYamlDecoder(
metadatautil.YamlDecoderOpts{
IncludeTagBaseDirectory: filepath.Join(t.MetadataDir, sourcesDirectory),
},
&functionsKey,

View File

@ -0,0 +1,189 @@
{
"version": 3,
"sources": [
{
"name": "default",
"kind": "postgres",
"tables": [
{
"table": {
"schema": "public",
"name": "t1"
},
"insert_permissions": [
{
"role": "user",
"permission": {
"check": {
"id": {
"_eq": "X-Hasura-User-Id"
}
},
"columns": [],
"backend_only": false
}
}
],
"event_triggers": [
{
"name": "t1",
"definition": {
"enable_manual": false,
"insert": {
"columns": "*"
}
},
"retry_conf": {
"num_retries": 0,
"interval_sec": 10,
"timeout_sec": 60
},
"webhook": "https://httpbin.org/post"
}
]
},
{
"table": {
"schema": "public",
"name": "t2"
}
}
],
"functions": [
{
"function": {
"schema": "public",
"name": "get_t1"
},
"some_amazing_stuff": {
"test1": "test",
"test2": "test"
},
"xyz_test": {
"test1": "test",
"test2": "test"
}
},
{
"function": {
"schema": "public",
"name": "get_t2"
}
}
],
"configuration": {
"connection_info": {
"database_url": {
"from_env": "HASURA_GRAPHQL_DATABASE_URL"
},
"isolation_level": "read-committed",
"pool_settings": {
"retries": 1,
"idle_timeout": 180,
"max_connections": 50
},
"use_prepared_statements": true
}
}
},
{
"name": "bg",
"kind": "bigquery",
"tables": [
{
"table": {
"dataset": "london_cycles",
"name": "cycle_hire"
}
},
{
"table": {
"dataset": "london_cycles",
"name": "cycle_stations"
}
}
],
"configuration": {
"service_account": {
"project_id": "some_test",
"client_email": "some_email",
"private_key": "the private key"
},
"project_id": "test_id",
"datasets": [
"t1"
]
}
}
],
"remote_schemas": [
{
"name": "countries",
"definition": {
"url": "https://countries.trevorblades.com/",
"timeout_seconds": 60,
"forward_client_headers": true
}
}
],
"actions": [
{
"name": "action1",
"definition": {
"handler": "http://localhost:3000",
"output_type": "SampleOutput",
"arguments": [
{
"name": "arg1",
"type": "SampleInput!"
}
],
"type": "mutation",
"kind": "synchronous"
}
},
{
"name": "action2",
"definition": {
"handler": "http://localhost:3000",
"output_type": "SampleOutput",
"arguments": [
{
"name": "arg1",
"type": "SampleInput!"
}
],
"type": "mutation",
"kind": "synchronous"
}
}
],
"custom_types": {
"input_objects": [
{
"name": "SampleInput",
"fields": [
{
"name": "username",
"type": "String!"
},
{
"name": "password",
"type": "String!"
}
]
}
],
"objects": [
{
"name": "SampleOutput",
"fields": [
{
"name": "accessToken",
"type": "String!"
}
]
}
]
}
}

View File

@ -0,0 +1,6 @@
actions: []
custom_types:
enums: []
input_objects: []
objects: []
scalars: []

View File

@ -0,0 +1,70 @@
- name: s1
kind: postgres
configuration:
connection_info:
database_url:
from_env: HASURA_GRAPHQL_DATABASE_URL
isolation_level: read-committed
pool_settings:
idle_timeout: 180
max_connections: 50
retries: 1
use_prepared_statements: true
tables: !include "s1/tables/tables.yaml"
functions: !include "s1/functions/functions.yaml"
- name: s2
kind: postgres
configuration:
connection_info:
database_url:
from_env: HASURA_GRAPHQL_DATABASE_URL
isolation_level: read-committed
pool_settings:
idle_timeout: 180
max_connections: 50
retries: 1
use_prepared_statements: true
tables: "!include s2/tables/tables.yaml"
functions: "!include s2/functions/functions.yaml"
- name: s 3
kind: postgres
configuration:
connection_info:
database_url:
from_env: HASURA_GRAPHQL_DATABASE_URL
isolation_level: read-committed
pool_settings:
idle_timeout: 180
max_connections: 50
retries: 1
use_prepared_statements: true
tables: !include "s 3/tables/tables.yaml"
functions: !include "s 3/functions/functions.yaml"
- name: s 4
kind: postgres
configuration:
connection_info:
database_url:
from_env: HASURA_GRAPHQL_DATABASE_URL
isolation_level: read-committed
pool_settings:
idle_timeout: 180
max_connections: 50
retries: 1
use_prepared_statements: true
tables: "!include s 4/tables/tables.yaml"
functions: "!include s 4/functions/functions.yaml"
- name: s 5
kind: postgres
configuration:
connection_info:
database_url:
from_env: HASURA_GRAPHQL_DATABASE_URL
isolation_level: read-committed
pool_settings:
idle_timeout: 180
max_connections: 50
retries: 1
use_prepared_statements: true
tables: "!include \"s 5/tables/tables.yaml\""
functions: "!include \"s 5/functions/functions.yaml\""

View File

@ -0,0 +1,2 @@
- !include "public_get_t1.yaml"
- !include "public_get_t2.yaml"

View File

@ -0,0 +1,3 @@
function:
name: get_t1
schema: public

View File

@ -0,0 +1,3 @@
function:
name: get_t2
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t1
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t2
schema: public

View File

@ -0,0 +1,2 @@
- !include "public_t1.yaml"
- !include "public_t2.yaml"

View File

@ -0,0 +1,2 @@
- !include "public_get_t1.yaml"
- !include "public_get_t2.yaml"

View File

@ -0,0 +1,3 @@
function:
name: get_t1
schema: public

View File

@ -0,0 +1,3 @@
function:
name: get_t2
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t1
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t2
schema: public

View File

@ -0,0 +1,2 @@
- !include "public_t1.yaml"
- !include "public_t2.yaml"

View File

@ -0,0 +1,2 @@
- !include "public_get_t1.yaml"
- !include "public_get_t2.yaml"

View File

@ -0,0 +1,3 @@
function:
name: get_t1
schema: public

View File

@ -0,0 +1,3 @@
function:
name: get_t2
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t1
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t2
schema: public

View File

@ -0,0 +1,2 @@
- !include "public_t1.yaml"
- !include "public_t2.yaml"

View File

@ -0,0 +1,2 @@
- !include "public_get_t1.yaml"
- !include "public_get_t2.yaml"

View File

@ -0,0 +1,3 @@
function:
name: get_t1
schema: public

View File

@ -0,0 +1,3 @@
function:
name: get_t2
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t1
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t2
schema: public

View File

@ -0,0 +1,2 @@
- !include "public_t1.yaml"
- !include "public_t2.yaml"

View File

@ -0,0 +1,2 @@
- !include "public_get_t1.yaml"
- !include "public_get_t2.yaml"

View File

@ -0,0 +1,3 @@
function:
name: get_t1
schema: public

View File

@ -0,0 +1,3 @@
function:
name: get_t2
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t1
schema: public

View File

@ -0,0 +1,3 @@
table:
name: t2
schema: public

View File

@ -0,0 +1,2 @@
- !include "public_t1.yaml"
- !include "public_t2.yaml"

View File

@ -1,4 +1,4 @@
package sources
package metadatautil
import (
"fmt"
@ -11,21 +11,21 @@ import (
const includeTag = "!include"
type sourcesYamlDecoderOpts struct {
type YamlDecoderOpts struct {
// directory which is to be used as the parent directory to look for filenames
// specified in !include tag
IncludeTagBaseDirectory string
}
type sourcesYamlDecoder struct {
type yamlDecoder struct {
destination interface{}
opts sourcesYamlDecoderOpts
opts YamlDecoderOpts
}
func newSourcesYamlDecoder(opts sourcesYamlDecoderOpts, destination interface{}) *sourcesYamlDecoder {
return &sourcesYamlDecoder{destination, opts}
func NewYamlDecoder(opts YamlDecoderOpts, destination interface{}) *yamlDecoder {
return &yamlDecoder{destination, opts}
}
func (s *sourcesYamlDecoder) UnmarshalYAML(value *yaml.Node) error {
func (s *yamlDecoder) UnmarshalYAML(value *yaml.Node) error {
ctx := map[string]string{}
ctx[includeTag] = s.opts.IncludeTagBaseDirectory
@ -64,6 +64,10 @@ func resolveTags(ctx map[string]string, node *yaml.Node) (*yaml.Node, error) {
}
fileLocation := filepath.Join(baseDir, node.Value)
file, err := ioutil.ReadFile(fileLocation)
if err != nil {
return nil, fmt.Errorf("%s: %w", fileLocation, err)
}
// non-YAML includes (for example .graphql schema files) are inlined verbatim
if filepath.Ext(fileLocation) != ".yaml" {
node.Value = string(file)
return node, nil
}

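The rename above turns the sources-only !include decoder into a shared metadatautil helper, which the remote schema Build step also uses now. Below is a minimal usage sketch for code living inside the CLI module (metadatautil is an internal package), mirroring the Unmarshal calls shown in the diffs; the sample YAML, the base directory, and the referenced permissions file are illustrative assumptions, and the included file must exist on disk for the call to succeed.

package main

import (
	"fmt"
	"log"

	"github.com/hasura/graphql-engine/cli/v2/internal/metadatautil"
	v3yaml "gopkg.in/yaml.v3"
)

func main() {
	// YAML using the !include tag, as written by the remote schema Export step.
	// The path is resolved relative to IncludeTagBaseDirectory below.
	data := []byte(`permissions: !include tt/permissions/permissions.yaml`)

	var out interface{}
	// NewYamlDecoder wraps the destination so that !include tags are resolved
	// while yaml.v3 unmarshals the document.
	err := v3yaml.Unmarshal(data, metadatautil.NewYamlDecoder(
		metadatautil.YamlDecoderOpts{
			// assumed project-relative path to the exported remote schemas
			IncludeTagBaseDirectory: "metadata/remote_schemas",
		},
		&out,
	))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(out)
}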
View File

@ -1,4 +1,4 @@
package sources
package metadatautil
import (
"io/ioutil"
@ -113,9 +113,6 @@ actions: "!include actions.yaml"
max_connections: 50
retries: 1
use_prepared_statements: true
query_tags:
disabled: false
format: standard
tables:
- table:
name: t1