From 3d001fedb8fbebbc87b809eb7e19aed211ac22ca Mon Sep 17 00:00:00 2001 From: paritosh-08 <85472423+paritosh-08@users.noreply.github.com> Date: Thu, 30 Jun 2022 11:25:50 +0530 Subject: [PATCH] server: respect experimental flag for naming convention of table column names PR-URL: https://github.com/hasura/graphql-engine-mono/pull/4845 GitOrigin-RevId: f1347ec64ff883d1dcc87e588a063557d37cb968 --- .circleci/test-server.sh | 9 +- server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs | 10 +- ..._convention_without_feature_turned_on.yaml | 191 ++++++++++++++++++ server/tests-py/test_naming_conventions.py | 12 ++ 4 files changed, 217 insertions(+), 5 deletions(-) create mode 100644 server/tests-py/queries/naming_conventions/naming_convention_without_feature_turned_on.yaml diff --git a/.circleci/test-server.sh b/.circleci/test-server.sh index c92ba3d13d1..18fe200cf2c 100755 --- a/.circleci/test-server.sh +++ b/.circleci/test-server.sh @@ -724,11 +724,18 @@ naming-conventions) echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH EXPERIMENTAL FEATURE: NAMING CONVENTIONS ########>\n" export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM" + run_hge_with_args serve + wait_for_port 8080 + + pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_naming_conventions.py::TestNamingConventionWithoutExperimentalFeature + + kill_hge_servers + export HASURA_GRAPHQL_EXPERIMENTAL_FEATURES=naming_convention run_hge_with_args serve wait_for_port 8080 - pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_naming_conventions.py + pytest -n 1 --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" test_naming_conventions.py::TestNamingConventions kill_hge_servers diff --git a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs index d343093f735..deaf73a62da 100644 --- a/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs +++ b/server/src-lib/Hasura/RQL/DDL/Schema/Cache.hs @@ -521,6 +521,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do |) (tableCoreInfos `alignTableMap` mapFromL _tpiTable permissions `alignTableMap` eventTriggerInfoMaps) defaultNC <- bindA -< _sccDefaultNamingConvention <$> askServerConfigCtx + isNamingConventionEnabled <- bindA -< ((EFNamingConventions `elem`) . _sccExperimentalFeatures) <$> askServerConfigCtx -- sql functions functionCache <- @@ -551,7 +552,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do rawfunctionInfo <- bindErrorA -< handleMultipleFunctions @b qf funcDefs let metadataPermissions = mapFromL _fpmRole functionPermissions permissionsMap = mkBooleanPermissionMap FunctionPermissionInfo metadataPermissions orderedRoles - let !namingConv = getNamingConvention sourceCustomization defaultNC + let !namingConv = if isNamingConventionEnabled then getNamingConvention sourceCustomization defaultNC else HasuraCase (functionInfo, dep) <- bindErrorA -< buildFunctionInfo sourceName qf systemDefined config permissionsMap rawfunctionInfo comment namingConv recordDependencies -< (metadataObject, schemaObject, [dep]) returnA -< functionInfo @@ -637,6 +638,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do let remoteSchemaCtxMap = M.map (fst . fst) remoteSchemaMap defaultNC <- bindA -< _sccDefaultNamingConvention <$> askServerConfigCtx + isNamingConventionEnabled <- bindA -< ((EFNamingConventions `elem`) . 
_sccExperimentalFeatures) <$> askServerConfigCtx -- sources are build in two steps -- first we resolve them, and build the table cache @@ -645,7 +647,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do Inc.keyed ( \_ exists -> AB.dispatchAnyBackendArrow @BackendMetadata @BackendEventTrigger - ( proc (backendConfigAndSourceMetadata, (invalidationKeys, defaultNC)) -> do + ( proc (backendConfigAndSourceMetadata, (invalidationKeys, defaultNC, isNamingConventionEnabled)) -> do let sourceMetadata = _bcasmSourceMetadata backendConfigAndSourceMetadata sourceName = _smName sourceMetadata sourceInvalidationsKeys = Inc.selectD #_ikSources invalidationKeys @@ -655,7 +657,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do Just (source :: ResolvedSource b) -> do let metadataInvalidationKey = Inc.selectD #_ikMetadata invalidationKeys (tableInputs, _, _) = unzip3 $ map mkTableInputs $ OMap.elems $ _smTables sourceMetadata - !namingConv = getNamingConvention (_smCustomization sourceMetadata) defaultNC + !namingConv = if isNamingConventionEnabled then getNamingConvention (_smCustomization sourceMetadata) defaultNC else HasuraCase tablesCoreInfo <- buildTableCache -< @@ -692,7 +694,7 @@ buildSchemaCacheRule logger env = proc (metadata, invalidationKeys) -> do PartiallyResolvedSource sourceMetadata source tablesCoreInfo eventTriggerInfoMaps ) -< - (exists, (invalidationKeys, defaultNC)) + (exists, (invalidationKeys, defaultNC, isNamingConventionEnabled)) ) |) (M.fromList $ OMap.toList backendConfigAndSourceMetadata) >-> (\infos -> M.catMaybes infos >- returnA) diff --git a/server/tests-py/queries/naming_conventions/naming_convention_without_feature_turned_on.yaml b/server/tests-py/queries/naming_conventions/naming_convention_without_feature_turned_on.yaml new file mode 100644 index 00000000000..8724d824470 --- /dev/null +++ b/server/tests-py/queries/naming_conventions/naming_convention_without_feature_turned_on.yaml @@ -0,0 +1,191 @@ +# Test with graphql-default naming convention + +- description: PG add source + url: /v1/metadata + status: 200 + response: + message: success + query: + type: pg_add_source + args: + name: pg1 + configuration: + connection_info: + database_url: + from_env: HASURA_GRAPHQL_PG_SOURCE_URL_1 + pool_settings: + max_connections: 50 + idle_timeout: 180 + retries: + customization: + naming_convention: graphql-default + +- description: create table 1 + url: /v1/query + status: 200 + response: + result_type: CommandOk + result: + query: + type: run_sql + args: + source: pg1 + sql: | + create table author_local( + id serial primary key, + author_name text unique + ); + INSERT INTO author_local (author_name) + VALUES ('Author 1'), ('Author 2'); + +- description: track table + url: /v1/metadata + status: 200 + response: + message: success + query: + type: pg_track_table + args: + table: author_local + source: pg1 + + +- description: Simple GraphQL query to fetch items from the source table + url: /v1/graphql + status: 200 + response: + data: + author_local: + - id: 1 + author_name: 'Author 1' + __typename: author_local + - id: 2 + author_name: 'Author 2' + __typename: author_local + query: + query: | + query { + author_local { + id + author_name + __typename + } + } + +- description: Lookup by pk + url: /v1/graphql + status: 200 + response: + data: + author_local_by_pk: + id: 1 + author_name: 'Author 1' + __typename: author_local + query: + query: | + query { + author_local_by_pk(id: 1) { + id + author_name + __typename + } + } + +- 
description: Aggregate + url: /v1/graphql + status: 200 + response: + data: + author_local_aggregate: + __typename: author_local_aggregate + aggregate: + __typename: author_local_aggregate_fields + count: 1 + query: + query: | + query MyQuery { + author_local_aggregate(where: {author_name: {_eq: "Author 2"}}) { + __typename + aggregate { + __typename + count + } + } + } + +- description: Insert + url: /v1/graphql + status: 200 + response: + data: + insert_author_local: + __typename: author_local_mutation_response + returning: + - __typename: author_local + id: 3 + author_name: Author 3 + query: + query: | + mutation MyMutation { + insert_author_local(objects: {author_name: "Author 3", id: 3}) { + __typename + returning { + __typename + id + author_name + } + } + } + +- description: Delete by pk + url: /v1/graphql + status: 200 + response: + data: + delete_author_local_by_pk: + __typename: author_local + id: 3 + author_name: Author 3 + query: + query: | + mutation MyMutation { + delete_author_local_by_pk(id: 3) { + __typename + id + author_name + } + } + +- description: untrack table + url: /v1/metadata + status: 200 + response: + message: success + query: + type: pg_untrack_table + args: + table: author_local + source: pg1 + +- description: drop table + url: /v1/query + status: 200 + response: + result_type: CommandOk + result: + query: + type: run_sql + args: + source: pg1 + sql: | + drop table author_local; + +- description: PG Drop Source 1 + url: /v1/metadata + status: 200 + response: + message: success + query: + type: pg_drop_source + args: + name: pg1 diff --git a/server/tests-py/test_naming_conventions.py b/server/tests-py/test_naming_conventions.py index a22a9558d86..962076dd0af 100644 --- a/server/tests-py/test_naming_conventions.py +++ b/server/tests-py/test_naming_conventions.py @@ -46,3 +46,15 @@ class TestDefaultNamingConvention: def test_default_global_naming_convention(self, hge_ctx): check_query_f(hge_ctx, self.dir() + '/default_global_naming_convention.yaml') + +@pytest.mark.skipif( not (os.getenv('HASURA_GRAPHQL_EXPERIMENTAL_FEATURES') is None or + not 'graphql-default' in os.getenv('HASURA_GRAPHQL_EXPERIMENTAL_FEATURES')), + reason="This test expects the (naming_convention) experimental feature turned OFF") +class TestNamingConventionWithoutExperimentalFeature: + + @classmethod + def dir(cls): + return "queries/naming_conventions" + + def test_naming_convention_without_feature_turned_on(self, hge_ctx): + check_query_f(hge_ctx, self.dir() + '/naming_convention_without_feature_turned_on.yaml')
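
Note for readers skimming the diff: the behavioural core of the change is small. In buildSchemaCacheRule the schema cache now also reads _sccExperimentalFeatures and only applies getNamingConvention (per-source customization falling back to the server-wide default) when EFNamingConventions is enabled, using HasuraCase otherwise; the CI job correspondingly runs TestNamingConventionWithoutExperimentalFeature against a server started without HASURA_GRAPHQL_EXPERIMENTAL_FEATURES before re-running TestNamingConventions with the flag set to naming_convention. Below is a minimal, self-contained Haskell sketch of that gating rule; ServerConfigCtx', resolveNamingConvention and the record field names are hypothetical stand-ins for illustration, not the engine's real API.

-- Sketch only: hypothetical stand-ins, not graphql-engine's real module or types.
module Main where

import Data.Maybe (fromMaybe)

data NamingCase = HasuraCase | GraphqlCase
  deriving (Show, Eq)

data ExperimentalFeature = EFNamingConventions | EFOtherFeature
  deriving (Show, Eq)

-- Stand-in for the relevant parts of the server config context
-- (_sccExperimentalFeatures and _sccDefaultNamingConvention).
data ServerConfigCtx' = ServerConfigCtx'
  { sccExperimentalFeatures    :: [ExperimentalFeature]
  , sccDefaultNamingConvention :: NamingCase
  }

-- Pick the naming convention the schema cache should use for a source:
-- source-level customization first, then the global default, but only when the
-- naming_convention experimental feature is on; otherwise always HasuraCase.
resolveNamingConvention :: ServerConfigCtx' -> Maybe NamingCase -> NamingCase
resolveNamingConvention ctx sourceCustomization
  | EFNamingConventions `elem` sccExperimentalFeatures ctx =
      fromMaybe (sccDefaultNamingConvention ctx) sourceCustomization
  | otherwise = HasuraCase

main :: IO ()
main = do
  let featureOff = ServerConfigCtx' [] GraphqlCase
      featureOn  = ServerConfigCtx' [EFNamingConventions] GraphqlCase
  -- Feature off: a graphql-default customization is ignored, prints HasuraCase.
  print (resolveNamingConvention featureOff (Just GraphqlCase))
  -- Feature on, no per-source customization: the default wins, prints GraphqlCase.
  print (resolveNamingConvention featureOn Nothing)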