From 7872be0e8275c3f766e06ecd003a52754bb0bf8a Mon Sep 17 00:00:00 2001
From: Gil Mizrahi
Date: Tue, 28 Feb 2023 13:17:08 +0200
Subject: [PATCH] feature(server): support subscriptions in logical models

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/8076
GitOrigin-RevId: 84a3e89d97bdb81c02803b644f417dfe51834405
---
 server/lib/api-tests/api-tests.cabal          |   1 +
 .../LogicalModels/SubscriptionsSpec.hs        | 215 ++++++++++++++++++
 .../Backends/Postgres/Execute/Subscription.hs |   9 +-
 .../Hasura/Backends/Postgres/SQL/DML.hs       |  20 +-
 .../Postgres/SQL/RenameIdentifiers.hs         |   4 +-
 .../Translate/Select/Internal/Helpers.hs      |   6 +
 .../Hasura/Eventing/ScheduledTrigger.hs       |  15 +-
 server/src-lib/Hasura/GraphQL/Schema.hs       |  36 +--
 .../src-lib/Hasura/GraphQL/Schema/Common.hs   |   6 +
 .../src-lib/Hasura/LogicalModel/Metadata.hs   |   4 +-
 10 files changed, 287 insertions(+), 29 deletions(-)
 create mode 100644 server/lib/api-tests/src/Test/Queries/LogicalModels/SubscriptionsSpec.hs

diff --git a/server/lib/api-tests/api-tests.cabal b/server/lib/api-tests/api-tests.cabal
index f1b110bab89..42f59b14cde 100644
--- a/server/lib/api-tests/api-tests.cabal
+++ b/server/lib/api-tests/api-tests.cabal
@@ -173,6 +173,7 @@ library
     Test.Queries.FilterSearch.AggregationPredicatesSpec
     Test.Queries.FilterSearch.FilterSearchSpec
     Test.Queries.LogicalModels.LogicalModelsQueriesSpec
+    Test.Queries.LogicalModels.SubscriptionsSpec
     Test.Queries.NestedObjectSpec
     Test.Queries.Paginate.LimitSpec
     Test.Queries.Paginate.OffsetSpec
diff --git a/server/lib/api-tests/src/Test/Queries/LogicalModels/SubscriptionsSpec.hs b/server/lib/api-tests/src/Test/Queries/LogicalModels/SubscriptionsSpec.hs
new file mode 100644
index 00000000000..15fc8eac357
--- /dev/null
+++ b/server/lib/api-tests/src/Test/Queries/LogicalModels/SubscriptionsSpec.hs
@@ -0,0 +1,215 @@
+{-# LANGUAGE QuasiQuotes #-}
+
+-- | Test subscriptions over logical models
+module Test.Queries.LogicalModels.SubscriptionsSpec (spec) where
+
+import Data.Aeson (Value)
+import Data.List.NonEmpty qualified as NE
+import Data.Time.Calendar.OrdinalDate
+import Data.Time.Clock
+import Database.PG.Query qualified as PG
+import Harness.Backend.Postgres qualified as Postgres
+import Harness.GraphqlEngine qualified as GraphqlEngine
+import Harness.Quoter.Graphql
+import Harness.Quoter.Yaml (interpolateYaml, yaml)
+import Harness.Subscriptions
+import Harness.Test.BackendType qualified as BackendType
+import Harness.Test.Fixture qualified as Fixture
+import Harness.Test.Schema (Table (..), table)
+import Harness.Test.Schema qualified as Schema
+import Harness.TestEnvironment (GlobalTestEnvironment, TestEnvironment, getBackendTypeConfig)
+import Harness.Yaml (shouldReturnYaml)
+import Hasura.Prelude
+import Test.Hspec (SpecWith, describe, it, shouldContain)
+
+-- ** Preamble
+
+featureFlagForLogicalModels :: String
+featureFlagForLogicalModels = "HASURA_FF_LOGICAL_MODEL_INTERFACE"
+
+spec :: SpecWith GlobalTestEnvironment
+spec =
+  Fixture.hgeWithEnv [(featureFlagForLogicalModels, "True")] $
+    Fixture.run
+      ( NE.fromList
+          [ (Fixture.fixture $ Fixture.Backend Postgres.backendTypeMetadata)
+              { Fixture.setupTeardown = \(testEnvironment, _) ->
+                  [ Postgres.setupTablesAction schema testEnvironment
+                  ]
+              }
+          ]
+      )
+      tests
+
+-- ** Setup and teardown
+
+-- we add and track a table here as it's the only way we can currently define a
+-- return type
+schema :: [Schema.Table]
+schema =
+  [ (table "article")
+      { tableColumns =
+          [ Schema.column "id" Schema.TInt,
+            Schema.column "title" Schema.TStr,
+            Schema.column "content" Schema.TStr,
+            Schema.column "date" Schema.TUTCTime
+          ],
+        tableData =
+          [ [ Schema.VInt 1,
+              Schema.VStr "Dogs",
+              Schema.VStr "I like to eat dog food I am a dogs I like to eat dog food I am a dogs I like to eat dog food I am a dogs",
+              Schema.VUTCTime (UTCTime (fromOrdinalDate 2000 1) 0)
+            ]
+          ]
+      }
+  ]
+
+tests :: Fixture.Options -> SpecWith TestEnvironment
+tests opts = do
+  let shouldBe :: IO Value -> Value -> IO ()
+      shouldBe = shouldReturnYaml opts
+
+  withSubscriptions do
+    describe "A subscription on a logical model" do
+      it "is updated on database changes" $ \(mkSubscription, testEnvironment) -> do
+        let backendTypeMetadata = fromMaybe (error "Unknown backend") $ getBackendTypeConfig testEnvironment
+            sourceName = BackendType.backendSourceName backendTypeMetadata
+            backendPrefix = BackendType.backendTypeString backendTypeMetadata
+            spicyQuery :: Text
+            spicyQuery =
+              [PG.sql|
+                select
+                  id,
+                  title,
+                  (substring(content, 1, {{length}}) || (case when length(content) < {{length}} then '' else '...' end)) as excerpt,
+                  date
+                from article
+              |]
+
+        shouldReturnYaml
+          opts
+          ( GraphqlEngine.postMetadata
+              testEnvironment
+              [yaml|
+                type: pg_track_logical_model
+                args:
+                  type: query
+                  source: *sourceName
+                  root_field_name: article_with_excerpt
+                  code: *spicyQuery
+                  arguments:
+                    length: int
+                  returns:
+                    columns:
+                      id: integer
+                      title: text
+                      excerpt: text
+                      date: date
+              |]
+          )
+          [yaml|
+            message: success
+          |]
+
+        query <-
+          mkSubscription
+            [graphql|
+              subscription {
+                article_with_excerpt(args: { length: "34" }) {
+                  id
+                  title
+                  date
+                  excerpt
+                }
+              }
+            |]
+            []
+        -- check initial query result
+        do
+          let expected :: Value
+              expected =
+                [yaml|
+                  data:
+                    article_with_excerpt:
+                      - id: 1
+                        title: "Dogs"
+                        date: "2000-01-01T00:00:00"
+                        excerpt: "I like to eat dog food I am a dogs..."
+                |]
+              actual :: IO Value
+              actual = getNextResponse query
+
+          actual `shouldBe` expected
+
+        -- add a row
+        do
+          expected <-
+            GraphqlEngine.postV2Query 200 testEnvironment $
+              [interpolateYaml|
+                type: #{backendPrefix}_run_sql
+                args:
+                  cascade: false
+                  read_only: false
+                  source: #{sourceName}
+                  sql: |
+                    insert into article values(
+                      2,
+                      'Cats',
+                      'I like to eat cat food I am a cats I like to eat cat food I am a cats I like to eat cat food I am a cats',
+                      '2000-01-01'
+                    );
+              |]
+          show expected `shouldContain` "CommandOk"
+
+        -- check updated response
+        do
+          let expected :: Value
+              expected =
+                [yaml|
+                  data:
+                    article_with_excerpt:
+                      - id: 1
+                        title: "Dogs"
+                        date: "2000-01-01T00:00:00"
+                        excerpt: "I like to eat dog food I am a dogs..."
+                      - id: 2
+                        title: "Cats"
+                        date: "2000-01-01T00:00:00"
+                        excerpt: "I like to eat cat food I am a cats..."
+                |]
+              actual :: IO Value
+              actual = getNextResponse query
+
+          actual `shouldBe` expected
+
+        -- delete a row
+        do
+          expected <-
+            GraphqlEngine.postV2Query 200 testEnvironment $
+              [interpolateYaml|
+                type: #{backendPrefix}_run_sql
+                args:
+                  cascade: false
+                  read_only: false
+                  source: #{sourceName}
+                  sql: |
+                    delete from article where id = 2;
+              |]
+          show expected `shouldContain` "CommandOk"
+
+        -- check updated response
+        do
+          let expected :: Value
+              expected =
+                [yaml|
+                  data:
+                    article_with_excerpt:
+                      - id: 1
+                        title: "Dogs"
+                        date: "2000-01-01T00:00:00"
+                        excerpt: "I like to eat dog food I am a dogs..."
+                |]
+              actual :: IO Value
+              actual = getNextResponse query
+
+          actual `shouldBe` expected
diff --git a/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs b/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs
index b06cdcaf7a2..d42a4ebccaf 100644
--- a/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs
+++ b/server/src-lib/Hasura/Backends/Postgres/Execute/Subscription.hs
@@ -38,7 +38,7 @@ import Hasura.Backends.Postgres.SQL.Types
 import Hasura.Backends.Postgres.SQL.Value
 import Hasura.Backends.Postgres.Translate.Column (toTxtValue)
 import Hasura.Backends.Postgres.Translate.Select qualified as DS
-import Hasura.Backends.Postgres.Translate.Select.Internal.Helpers (customSQLToTopLevelCTEs, toQuery)
+import Hasura.Backends.Postgres.Translate.Select.Internal.Helpers (customSQLToInnerCTEs, toQuery)
 import Hasura.Backends.Postgres.Translate.Types (CustomSQLCTEs (..))
 import Hasura.Backends.Postgres.Types.Column
 import Hasura.Base.Error
@@ -159,7 +159,8 @@ mkMultiplexedQuery rootFields =
           ]
       }
 
-    selectWith = S.SelectWith (customSQLToTopLevelCTEs customSQLCTEs) select
+    -- multiplexed queries may only contain read only raw queries
+    selectWith = S.SelectWith [] select
 
     -- FROM unnest($1::uuid[], $2::json[]) _subs (result_id, result_vars)
    subsInputFromItem =
@@ -181,6 +182,7 @@ mkMultiplexedQuery rootFields =
     selectRootFields =
       S.mkSelect
         { S.selExtr = [S.Extractor rootFieldsJsonAggregate (Just $ S.toColumnAlias $ Identifier "root")],
+          S.selCTEs = customSQLToInnerCTEs customSQLCTEs,
           S.selFrom = Just $ S.FromExp sqlFrom
         }
 
@@ -204,7 +206,7 @@ mkStreamingMultiplexedQuery ::
 mkStreamingMultiplexedQuery (fieldAlias, resolvedAST) =
   MultiplexedQuery . toQuery $ selectWith
   where
-    selectWith = S.SelectWith (customSQLToTopLevelCTEs customSQLCTEs) select
+    selectWith = S.SelectWith [] select
 
     select =
       S.mkSelect
@@ -237,6 +239,7 @@ mkStreamingMultiplexedQuery (fieldAlias, resolvedAST) =
     selectRootFields =
       S.mkSelect
         { S.selExtr = [(S.Extractor rootFieldJsonAggregate (Just $ S.toColumnAlias $ Identifier "root")), cursorExtractor],
+          S.selCTEs = customSQLToInnerCTEs customSQLCTEs,
           S.selFrom = Just $ S.FromExp [fromSQL]
         }
 
diff --git a/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs b/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs
index 4b547eb3be1..15fbf86194d 100644
--- a/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs
+++ b/server/src-lib/Hasura/Backends/Postgres/SQL/DML.hs
@@ -7,6 +7,7 @@ module Hasura.Backends.Postgres.SQL.DML
     BinOp (AndOp, OrOp),
     BoolExp (..),
     TopLevelCTE (CTEDelete, CTEInsert, CTESelect, CTEUpdate, CTEUnsafeRawSQL),
+    InnerCTE (..),
     CompareOp (SContainedIn, SContains, SEQ, SGT, SGTE, SHasKey, SHasKeysAll, SHasKeysAny, SILIKE, SIREGEX, SLIKE, SLT, SLTE, SMatchesFulltext, SNE, SNILIKE, SNIREGEX, SNLIKE, SNREGEX, SNSIMILAR, SREGEX, SSIMILAR),
     CountType (CTDistinct, CTSimple, CTStar),
     DistinctExpr (DistinctOn, DistinctSimple),
@@ -124,7 +125,7 @@ import Text.Builder qualified as TB
 data Select = Select
   { -- | Unlike 'SelectWith', does not allow data-modifying statements (as those are only allowed at
     -- the top level of a query).
-    selCTEs :: [(TableAlias, Select)],
+    selCTEs :: [(TableAlias, InnerCTE)],
     selDistinct :: Maybe DistinctExpr,
     selExtr :: [Extractor],
     selFrom :: Maybe FromExp,
@@ -315,7 +316,7 @@ instance ToSQL Select where
         <~> toSQL (selLimit sel)
         <~> toSQL (selOffset sel)
       -- reuse SelectWith if there are any CTEs, since the generated SQL is the same
-      ctes -> toSQL $ SelectWith (map (CTESelect <$>) ctes) sel {selCTEs = []}
+      ctes -> toSQL $ SelectWith (map (toTopLevelCTE <$>) ctes) sel {selCTEs = []}
 
 mkSIdenExp :: (IsIdentifier a) => a -> SQLExp
 mkSIdenExp = SEIdentifier . toIdentifier
@@ -1189,6 +1190,21 @@ instance ToSQL TopLevelCTE where
       -- if the user has a comment on the last line, this will make sure it doesn't interrupt the rest of the query
       <> "\n"
 
+-- | Represents a common table expression that can be used in nested selects.
+data InnerCTE
+  = ICTESelect Select
+  | ICTEUnsafeRawSQL (InterpolatedQuery SQLExp)
+  deriving (Show, Eq, Generic, Data)
+
+instance NFData InnerCTE
+
+instance Hashable InnerCTE
+
+toTopLevelCTE :: InnerCTE -> TopLevelCTE
+toTopLevelCTE = \case
+  ICTESelect select -> CTESelect select
+  ICTEUnsafeRawSQL query -> CTEUnsafeRawSQL query
+
 -- | A @SELECT@ statement with Common Table Expressions.
 --
 --
diff --git a/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs b/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs
index 7e8e69a31e3..305d8d11aa5 100644
--- a/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs
+++ b/server/src-lib/Hasura/Backends/Postgres/SQL/RenameIdentifiers.hs
@@ -226,7 +226,9 @@ uSelect (S.Select ctes distinctM extrs fromM whereM groupByM havingM orderByM li
   newCTEs <- for ctes $ \(alias, cte) ->
     (,)
       <$> addAliasAndPrefixHash alias
-      <*> uSelect cte
+      <*> case cte of
+        S.ICTESelect select -> S.ICTESelect <$> uSelect select
+        S.ICTEUnsafeRawSQL q -> S.ICTEUnsafeRawSQL <$> traverse uSqlExp q
 
   -- Potentially introduces a new alias so it should go before the rest.
   newFromM <- mapM uFromExp fromM
diff --git a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs
index ad5c6f8a31a..10132e64720 100644
--- a/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs
+++ b/server/src-lib/Hasura/Backends/Postgres/Translate/Select/Internal/Helpers.hs
@@ -19,6 +19,7 @@ module Hasura.Backends.Postgres.Translate.Select.Internal.Helpers
     withForceAggregation,
     selectToSelectWith,
     customSQLToTopLevelCTEs,
+    customSQLToInnerCTEs,
     logicalModelNameToAlias,
     toQuery,
   )
@@ -168,5 +169,10 @@ customSQLToTopLevelCTEs :: CustomSQLCTEs -> [(S.TableAlias, S.TopLevelCTE)]
 customSQLToTopLevelCTEs =
   fmap (bimap S.toTableAlias S.CTEUnsafeRawSQL) . Map.toList . getCustomSQLCTEs
 
+-- | convert map of CustomSQL CTEs into named InnerCTEs
+customSQLToInnerCTEs :: CustomSQLCTEs -> [(S.TableAlias, S.InnerCTE)]
+customSQLToInnerCTEs =
+  fmap (bimap S.toTableAlias S.ICTEUnsafeRawSQL) . Map.toList . getCustomSQLCTEs
+
 toQuery :: S.SelectWithG S.TopLevelCTE -> Query
 toQuery = fromBuilder . toSQL . renameIdentifiersSelectWithTopLevelCTE
diff --git a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs
index 71bbb920444..21ef7e5bebe 100644
--- a/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs
+++ b/server/src-lib/Hasura/Eventing/ScheduledTrigger.hs
@@ -896,7 +896,7 @@ mkPaginationSelectExp ::
   S.Select
 mkPaginationSelectExp allRowsSelect ScheduledEventPagination {..} shouldIncludeRowsCount =
   S.mkSelect
-    { S.selCTEs = [(countCteAlias, allRowsSelect), (limitCteAlias, limitCteSelect)],
+    { S.selCTEs = [(countCteAlias, S.ICTESelect allRowsSelect), (limitCteAlias, limitCteSelect)],
       S.selExtr =
         case shouldIncludeRowsCount of
           IncludeRowsCount -> [countExtractor, rowsExtractor]
@@ -915,12 +915,13 @@ mkPaginationSelectExp allRowsSelect ScheduledEventPagination {..} shouldIncludeR
        in S.Extractor (S.SESelect selectExp) Nothing
 
     limitCteSelect =
-      S.mkSelect
-        { S.selExtr = [S.selectStar],
-          S.selFrom = Just $ S.mkIdenFromExp (S.tableAliasToIdentifier countCteAlias),
-          S.selLimit = (S.LimitExp . S.intToSQLExp) <$> _sepLimit,
-          S.selOffset = (S.OffsetExp . S.intToSQLExp) <$> _sepOffset
-        }
+      S.ICTESelect
+        S.mkSelect
+          { S.selExtr = [S.selectStar],
+            S.selFrom = Just $ S.mkIdenFromExp (S.tableAliasToIdentifier countCteAlias),
+            S.selLimit = (S.LimitExp . S.intToSQLExp) <$> _sepLimit,
+            S.selOffset = (S.OffsetExp . S.intToSQLExp) <$> _sepOffset
+          }
 
     rowsExtractor =
       let jsonAgg = S.SEUnsafe "json_agg(row_to_json(limit_cte.*))"
diff --git a/server/src-lib/Hasura/GraphQL/Schema.hs b/server/src-lib/Hasura/GraphQL/Schema.hs
index 090603d8b61..69d191a2cda 100644
--- a/server/src-lib/Hasura/GraphQL/Schema.hs
+++ b/server/src-lib/Hasura/GraphQL/Schema.hs
@@ -323,27 +323,26 @@ buildRoleContext options sources remotes actions customTypes role remoteSchemaPe
       SourceInfo b ->
       MemoizeT
         m
-        ( [FieldParser P.Parse (NamespacedField (QueryRootField UnpreparedValue))],
-          [FieldParser P.Parse (NamespacedField (MutationRootField UnpreparedValue))],
-          [FieldParser P.Parse (NamespacedField (MutationRootField UnpreparedValue))],
-          [FieldParser P.Parse (NamespacedField (QueryRootField UnpreparedValue))],
-          [(G.Name, Parser 'Output P.Parse (ApolloFederationParserFunction P.Parse))]
+        ( [FieldParser P.Parse (NamespacedField (QueryRootField UnpreparedValue))], -- query fields
+          [FieldParser P.Parse (NamespacedField (MutationRootField UnpreparedValue))], -- mutation backend fields
+          [FieldParser P.Parse (NamespacedField (MutationRootField UnpreparedValue))], -- mutation frontend fields
+          [FieldParser P.Parse (NamespacedField (QueryRootField UnpreparedValue))], -- subscription fields
+          [(G.Name, Parser 'Output P.Parse (ApolloFederationParserFunction P.Parse))] -- apollo federation tables
        )
    buildSource schemaContext schemaOptions sourceInfo@(SourceInfo _ tables functions logicalModels _ _ sourceCustomization) =
      runSourceSchema schemaContext schemaOptions sourceInfo do
        let validFunctions = takeValidFunctions functions
+            validLogicalModels = takeValidLogicalModels logicalModels
            validTables = takeValidTables tables
            mkRootFieldName = _rscRootFields sourceCustomization
            makeTypename = SC._rscTypeNames sourceCustomization
        (uncustomizedQueryRootFields, uncustomizedSubscriptionRootFields, apolloFedTableParsers) <-
-          buildQueryAndSubscriptionFields mkRootFieldName sourceInfo validTables validFunctions
-        logicalModelRootFields <-
-          buildLogicalModelFields sourceInfo logicalModels
+          buildQueryAndSubscriptionFields mkRootFieldName sourceInfo validTables validFunctions validLogicalModels
        (,,,,apolloFedTableParsers)
          <$> customizeFields
            sourceCustomization
            (makeTypename <> MkTypename (<> Name.__query))
-            (pure (uncustomizedQueryRootFields <> logicalModelRootFields))
+            (pure uncustomizedQueryRootFields)
          <*> customizeFields
            sourceCustomization
            (makeTypename <> MkTypename (<> Name.__mutation_frontend))
@@ -642,8 +641,15 @@ buildQueryAndSubscriptionFields ::
   SourceInfo b ->
   TableCache b ->
   FunctionCache b ->
-  SchemaT r m ([P.FieldParser n (QueryRootField UnpreparedValue)], [P.FieldParser n (SubscriptionRootField UnpreparedValue)], [(G.Name, Parser 'Output n (ApolloFederationParserFunction n))])
-buildQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExposedAs FEAQuery -> functions) = do
+  LogicalModels b ->
+  SchemaT
+    r
+    m
+    ( [P.FieldParser n (QueryRootField UnpreparedValue)],
+      [P.FieldParser n (SubscriptionRootField UnpreparedValue)],
+      [(G.Name, Parser 'Output n (ApolloFederationParserFunction n))]
+    )
+buildQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExposedAs FEAQuery -> functions) logicalModels = do
   roleName <- retrieve scRole
   functionPermsCtx <- retrieve Options.soInferFunctionPermissions
   functionSelectExpParsers <-
@@ -655,6 +661,8 @@ buildQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExposedAs
           || functionPermsCtx == Options.InferFunctionPermissions
         let targetTableName = _fiReturnType functionInfo
         lift $ mkRFs $ buildFunctionQueryFields mkRootFieldName functionName functionInfo targetTableName
+  logicalModelRootFields <-
+    buildLogicalModelFields sourceInfo logicalModels
 
   (tableQueryFields, tableSubscriptionFields, apolloFedTableParsers) <-
     unzip3 . catMaybes
@@ -666,8 +674,8 @@ buildQueryAndSubscriptionFields mkRootFieldName sourceInfo tables (takeExposedAs
       tableSubscriptionRootFields = fmap mkRF $ concat tableSubscriptionFields
 
   pure
-    ( tableQueryRootFields <> functionSelectExpParsers,
-      tableSubscriptionRootFields <> functionSelectExpParsers,
+    ( tableQueryRootFields <> functionSelectExpParsers <> logicalModelRootFields,
+      tableSubscriptionRootFields <> functionSelectExpParsers <> logicalModelRootFields,
       catMaybes apolloFedTableParsers
     )
   where
@@ -694,7 +702,7 @@ buildLogicalModelFields sourceInfo logicalModels = runMaybeTmempty $ do
   guard $ roleName == adminRoleName
 
   map mkRF . catMaybes <$> for (OMap.elems logicalModels) \model -> do
-    lift $ (buildLogicalModelRootFields model)
+    lift (buildLogicalModelRootFields model)
   where
     mkRF ::
      FieldParser n (QueryDB b (RemoteRelationshipField UnpreparedValue) (UnpreparedValue b)) ->
diff --git a/server/src-lib/Hasura/GraphQL/Schema/Common.hs b/server/src-lib/Hasura/GraphQL/Schema/Common.hs
index 0a01ef540f5..f355de6f72e 100644
--- a/server/src-lib/Hasura/GraphQL/Schema/Common.hs
+++ b/server/src-lib/Hasura/GraphQL/Schema/Common.hs
@@ -40,6 +40,7 @@ module Hasura.GraphQL.Schema.Common
     parsedSelectionsToFields,
     partialSQLExpToUnpreparedValue,
     requiredFieldParser,
+    takeValidLogicalModels,
     takeValidFunctions,
     takeValidTables,
     textToName,
@@ -77,6 +78,7 @@ import Hasura.RQL.IR.BoolExp
 import Hasura.RQL.Types.Backend
 import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.Function
+import Hasura.RQL.Types.Metadata.Common (LogicalModels)
 import Hasura.RQL.Types.Relationships.Remote
 import Hasura.RQL.Types.SchemaCache hiding (askTableInfo)
 import Hasura.RQL.Types.Source
@@ -426,6 +428,10 @@ takeValidFunctions = Map.filter functionFilter
   where
     functionFilter = not . isSystemDefined . _fiSystemDefined
 
+-- | Currently we do no validation on logical models in schema. Should we?
+takeValidLogicalModels :: forall b. LogicalModels b -> LogicalModels b
+takeValidLogicalModels = id
+
 -- root field builder helpers
 
 requiredFieldParser ::
diff --git a/server/src-lib/Hasura/LogicalModel/Metadata.hs b/server/src-lib/Hasura/LogicalModel/Metadata.hs
index bfb4219f3c6..da8dea730c1 100644
--- a/server/src-lib/Hasura/LogicalModel/Metadata.hs
+++ b/server/src-lib/Hasura/LogicalModel/Metadata.hs
@@ -40,7 +40,7 @@ data InterpolatedItem variable
     IIText Text
   | -- | a captured variable
     IIVariable variable
-  deriving stock (Eq, Ord, Show, Functor, Foldable, Generic, Traversable)
+  deriving stock (Eq, Ord, Show, Functor, Foldable, Data, Generic, Traversable)
 
 -- | Converting an interpolated query back to text.
 -- Should roundtrip with the 'parseInterpolatedQuery'.
@@ -60,7 +60,7 @@ newtype InterpolatedQuery variable = InterpolatedQuery
   { getInterpolatedQuery :: [InterpolatedItem variable]
   }
   deriving newtype (Eq, Ord, Show, Generic)
-  deriving stock (Functor, Foldable, Traversable)
+  deriving stock (Data, Functor, Foldable, Traversable)
 
 deriving newtype instance (Hashable variable) => Hashable (InterpolatedQuery variable)
 
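
Reviewer note (not part of the patch): the server-side gist is that a logical model's
raw SQL is no longer hoisted to the top level of the generated statement. Both
mkMultiplexedQuery and mkStreamingMultiplexedQuery now pass [] for the top-level CTEs
and instead attach the collected CustomSQLCTEs to the per-root-field select via the
new InnerCTE type and S.selCTEs, keeping the multiplexer's own top level free and
limited to read-only SQL. Below is a minimal sketch of that attachment, assuming the
usual `S` alias for the Postgres DML module and that InterpolatedQuery is importable
from Hasura.LogicalModel.Metadata; the helper name `withRawSqlCTE` is illustrative
only and not something defined in the codebase:

    import Hasura.Backends.Postgres.SQL.DML qualified as S
    import Hasura.LogicalModel.Metadata (InterpolatedQuery)

    -- Make @rawSql@ visible to @sel@ as the CTE named @alias@, nested inside the
    -- select itself rather than lifted into a top-level WITH. This mirrors what
    -- customSQLToInnerCTEs does for the CTEs collected during translation.
    withRawSqlCTE :: S.TableAlias -> InterpolatedQuery S.SQLExp -> S.Select -> S.Select
    withRawSqlCTE alias rawSql sel =
      sel {S.selCTEs = (alias, S.ICTEUnsafeRawSQL rawSql) : S.selCTEs sel}

When ToSQL renders a Select whose selCTEs is non-empty, it reuses SelectWith (via the
new toTopLevelCTE), so the nested CTE prints as an ordinary WITH clause on the
subselect.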