server: respect experimental flag for naming convention of table column names
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/4845 GitOrigin-RevId: f1347ec64ff883d1dcc87e588a063557d37cb968
parent 07875ace0f
commit 3d001fedb8
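The gist of the server change: the per-source naming convention (and the server-wide default) is only honoured when the `naming_convention` experimental feature is enabled; otherwise the schema cache falls back to `HasuraCase`. Below is a minimal, self-contained sketch of that gating pattern; the names (`ExperimentalFeature`, `NamingCase`, `SourceCustomization`, `resolveNamingConvention`) are simplified stand-ins, not the engine's real definitions.

```haskell
import Data.Maybe (fromMaybe)

-- Simplified stand-ins for the engine's types (the real ones live in the
-- graphql-engine codebase and differ in detail).
data ExperimentalFeature = EFNamingConventions | EFOtherFeature
  deriving (Eq, Show)

data NamingCase = HasuraCase | GraphqlCase
  deriving (Eq, Show)

-- A source may request a convention via its customization; Nothing means
-- "use the server-wide default".
newtype SourceCustomization = SourceCustomization
  { scNamingConvention :: Maybe NamingCase
  } deriving (Show)

-- The gating pattern this commit introduces: the customization (or the
-- server default) only takes effect when the experimental flag is on.
resolveNamingConvention ::
  [ExperimentalFeature] -> -- enabled experimental features
  NamingCase ->            -- server-wide default convention
  SourceCustomization ->
  NamingCase
resolveNamingConvention experimentalFeatures defaultNC customization
  | EFNamingConventions `elem` experimentalFeatures =
      fromMaybe defaultNC (scNamingConvention customization)
  | otherwise = HasuraCase

main :: IO ()
main = do
  let custom = SourceCustomization (Just GraphqlCase)
  -- Feature flag off: the customization is ignored, HasuraCase wins.
  print (resolveNamingConvention [] HasuraCase custom)
  -- Feature flag on: the source-level customization is respected.
  print (resolveNamingConvention [EFNamingConventions] HasuraCase custom)
```

With the flag off the first `print` yields `HasuraCase`; with it on, the requested convention is used. This mirrors what the CI script below exercises by running the pytest suite once without and once with `HASURA_GRAPHQL_EXPERIMENTAL_FEATURES=naming_convention`.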
@@ -724,11 +724,18 @@ naming-conventions)
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH EXPERIMENTAL FEATURE: NAMING CONVENTIONS ########>\n"

export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM"
run_hge_with_args serve
wait_for_port 8080

pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_naming_conventions.py::TestNamingConventionWithoutExperimentalFeature

kill_hge_servers

export HASURA_GRAPHQL_EXPERIMENTAL_FEATURES=naming_convention
run_hge_with_args serve
wait_for_port 8080

pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_naming_conventions.py
pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_naming_conventions.py::TestNamingConventions

kill_hge_servers
@@ -521,6 +521,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
        |) (tableCoreInfos `alignTableMap` mapFromL _tpiTable permissions `alignTableMap` eventTriggerInfoMaps)

      defaultNC <- bindA -< _sccDefaultNamingConvention <$> askServerConfigCtx
      isNamingConventionEnabled <- bindA -< ((EFNamingConventions `elem`) . _sccExperimentalFeatures) <$> askServerConfigCtx

      -- sql functions
      functionCache <-
@@ -551,7 +552,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
              rawfunctionInfo <- bindErrorA -< handleMultipleFunctions @b qf funcDefs
              let metadataPermissions = mapFromL _fpmRole functionPermissions
                  permissionsMap = mkBooleanPermissionMap FunctionPermissionInfo metadataPermissions orderedRoles
              let !namingConv = getNamingConvention sourceCustomization defaultNC
              let !namingConv = if isNamingConventionEnabled then getNamingConvention sourceCustomization defaultNC else HasuraCase
              (functionInfo, dep) <- bindErrorA -< buildFunctionInfo sourceName qf systemDefined config permissionsMap rawfunctionInfo comment namingConv
              recordDependencies -< (metadataObject, schemaObject, [dep])
              returnA -< functionInfo
@@ -637,6 +638,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
      let remoteSchemaCtxMap = M.map (fst . fst) remoteSchemaMap

      defaultNC <- bindA -< _sccDefaultNamingConvention <$> askServerConfigCtx
      isNamingConventionEnabled <- bindA -< ((EFNamingConventions `elem`) . _sccExperimentalFeatures) <$> askServerConfigCtx

      -- sources are build in two steps
      -- first we resolve them, and build the table cache
@@ -645,7 +647,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
        Inc.keyed
          ( \_ exists ->
              AB.dispatchAnyBackendArrow @BackendMetadata @BackendEventTrigger
                ( proc (backendConfigAndSourceMetadata, (invalidationKeys, defaultNC)) -> do
                ( proc (backendConfigAndSourceMetadata, (invalidationKeys, defaultNC, isNamingConventionEnabled)) -> do
                    let sourceMetadata = _bcasmSourceMetadata backendConfigAndSourceMetadata
                        sourceName = _smName sourceMetadata
                        sourceInvalidationsKeys = Inc.selectD #_ikSources invalidationKeys
@@ -655,7 +657,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
                      Just (source :: ResolvedSource b) -> do
                        let metadataInvalidationKey = Inc.selectD #_ikMetadata invalidationKeys
                            (tableInputs, _, _) = unzip3 $ map mkTableInputs $ OMap.elems $ _smTables sourceMetadata
                            !namingConv = getNamingConvention (_smCustomization sourceMetadata) defaultNC
                            !namingConv = if isNamingConventionEnabled then getNamingConvention (_smCustomization sourceMetadata) defaultNC else HasuraCase
                        tablesCoreInfo <-
                          buildTableCache
                            -<
@@ -692,7 +694,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do
                            PartiallyResolvedSource sourceMetadata source tablesCoreInfo eventTriggerInfoMaps
                        )
                    -<
                      (exists, (invalidationKeys, defaultNC))
                      (exists, (invalidationKeys, defaultNC, isNamingConventionEnabled))
                )
          |) (M.fromList $ OMap.toList backendConfigAndSourceMetadata)
      >-> (\infos -> M.catMaybes infos >- returnA)
@@ -0,0 +1,191 @@
# Test with graphql-default naming convention

- description: PG add source
  url: /v1/metadata
  status: 200
  response:
    message: success
  query:
    type: pg_add_source
    args:
      name: pg1
      configuration:
        connection_info:
          database_url:
            from_env: HASURA_GRAPHQL_PG_SOURCE_URL_1
          pool_settings:
            max_connections: 50
            idle_timeout: 180
          retries:
      customization:
        naming_convention: graphql-default

- description: create table 1
  url: /v1/query
  status: 200
  response:
    result_type: CommandOk
    result:
  query:
    type: run_sql
    args:
      source: pg1
      sql: |
        create table author_local(
            id serial primary key,
            author_name text unique
        );
        INSERT INTO author_local (author_name)
            VALUES ('Author 1'), ('Author 2');

- description: track table
  url: /v1/metadata
  status: 200
  response:
    message: success
  query:
    type: pg_track_table
    args:
      table: author_local
      source: pg1


- description: Simple GraphQL query to fetch items from the source table
  url: /v1/graphql
  status: 200
  response:
    data:
      author_local:
      - id: 1
        author_name: 'Author 1'
        __typename: author_local
      - id: 2
        author_name: 'Author 2'
        __typename: author_local
  query:
    query: |
      query {
        author_local {
          id
          author_name
          __typename
        }
      }

- description: Lookup by pk
  url: /v1/graphql
  status: 200
  response:
    data:
      author_local_by_pk:
        id: 1
        author_name: 'Author 1'
        __typename: author_local
  query:
    query: |
      query {
        author_local_by_pk(id: 1) {
          id
          author_name
          __typename
        }
      }

- description: Aggregate
  url: /v1/graphql
  status: 200
  response:
    data:
      author_local_aggregate:
        __typename: author_local_aggregate
        aggregate:
          __typename: author_local_aggregate_fields
          count: 1
  query:
    query: |
      query MyQuery {
        author_local_aggregate(where: {author_name: {_eq: "Author 2"}}) {
          __typename
          aggregate {
            __typename
            count
          }
        }
      }

- description: Insert
  url: /v1/graphql
  status: 200
  response:
    data:
      insert_author_local:
        __typename: author_local_mutation_response
        returning:
        - __typename: author_local
          id: 3
          author_name: Author 3
  query:
    query: |
      mutation MyMutation {
        insert_author_local(objects: {author_name: "Author 3", id: 3}) {
          __typename
          returning {
            __typename
            id
            author_name
          }
        }
      }

- description: Delete by pk
  url: /v1/graphql
  status: 200
  response:
    data:
      delete_author_local_by_pk:
        __typename: author_local
        id: 3
        author_name: Author 3
  query:
    query: |
      mutation MyMutation {
        delete_author_local_by_pk(id: 3) {
          __typename
          id
          author_name
        }
      }

- description: untrack table
  url: /v1/metadata
  status: 200
  response:
    message: success
  query:
    type: pg_untrack_table
    args:
      table: author_local
      source: pg1

- description: drop table
  url: /v1/query
  status: 200
  response:
    result_type: CommandOk
    result:
  query:
    type: run_sql
    args:
      source: pg1
      sql: |
        drop table author_local;

- description: PG Drop Source 1
  url: /v1/metadata
  status: 200
  response:
    message: success
  query:
    type: pg_drop_source
    args:
      name: pg1
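Note that the expected responses above keep snake_case names (`author_local`, `author_local_by_pk`, `insert_author_local`) even though the source requests `naming_convention: graphql-default`: the experimental feature is left off for this test, so the customization must have no effect. For contrast, here is a rough sketch of the kind of field renaming the graphql-default convention applies once the feature is enabled (illustrative only, not the engine's implementation; the hypothetical helper `toGraphqlDefaultField` only handles field names, while the real convention also adjusts type names and enum values):

```haskell
import Data.Char (toUpper)

-- Illustrative only: approximate how the graphql-default convention
-- camel-cases a snake_case field name, e.g. "author_local" -> "authorLocal".
toGraphqlDefaultField :: String -> String
toGraphqlDefaultField name =
  case words (map underscoreToSpace name) of
    []       -> name
    (w : ws) -> concat (w : map capitalize ws)
  where
    -- Split on underscores by turning them into spaces and reusing 'words'.
    underscoreToSpace '_' = ' '
    underscoreToSpace c   = c
    capitalize []       = []
    capitalize (c : cs) = toUpper c : cs

main :: IO ()
main = do
  putStrLn (toGraphqlDefaultField "author_local")        -- authorLocal
  putStrLn (toGraphqlDefaultField "author_local_by_pk")  -- authorLocalByPk
  putStrLn (toGraphqlDefaultField "insert_author_local") -- insertAuthorLocal
```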
@@ -46,3 +46,15 @@ class TestDefaultNamingConvention:

    def test_default_global_naming_convention(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/default_global_naming_convention.yaml')

@pytest.mark.skipif( not (os.getenv('HASURA_GRAPHQL_EXPERIMENTAL_FEATURES') is None or
                    not 'graphql-default' in os.getenv('HASURA_GRAPHQL_EXPERIMENTAL_FEATURES')),
                    reason="This test expects the (naming_convention) experimental feature turned OFF")
class TestNamingConventionWithoutExperimentalFeature:

    @classmethod
    def dir(cls):
        return "queries/naming_conventions"

    def test_naming_convention_without_feature_turned_on(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/naming_convention_without_feature_turned_on.yaml')