graphql-engine/server/src-lib/Hasura/Backends/Postgres/Instances/Schema.hs

{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.Postgres.Instances.Schema
  (
  )
where

import Data.Aeson qualified as J
import Data.Has
import Data.HashMap.Strict qualified as Map
import Data.HashMap.Strict.Extended qualified as M
import Data.HashMap.Strict.InsOrd.Extended qualified as OMap
import Data.List.NonEmpty qualified as NE
import Data.Parser.JSONPath
import Data.Text qualified as T
import Data.Text.Extended
import Hasura.Backends.Postgres.SQL.DML as PG hiding (CountType)
import Hasura.Backends.Postgres.SQL.Types as PG hiding (FunctionName, TableName)
import Hasura.Backends.Postgres.SQL.Value as PG
import Hasura.Backends.Postgres.Types.BoolExp
import Hasura.Backends.Postgres.Types.Column
import Hasura.Base.Error
import Hasura.GraphQL.Parser hiding (EnumValueInfo, field)
import Hasura.GraphQL.Parser qualified as P
import Hasura.GraphQL.Parser.Internal.Parser hiding (field)
import Hasura.GraphQL.Schema.Backend
  ( BackendSchema,
    ComparisonExp,
    MonadBuildSchema,
  )
import Hasura.GraphQL.Schema.Backend qualified as BS
import Hasura.GraphQL.Schema.BoolExp
import Hasura.GraphQL.Schema.Build qualified as GSB
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Mutation qualified as GSB
import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Schema.Table
import Hasura.Prelude
import Hasura.RQL.IR
import Hasura.RQL.IR.Select qualified as IR
import Hasura.RQL.IR.Update qualified as IR
import Hasura.RQL.Types
import Hasura.SQL.AnyBackend qualified as AB
import Hasura.SQL.Types
import Language.GraphQL.Draft.Syntax qualified as G

----------------------------------------------------------------
-- BackendSchema instance

-- | This class is an implementation detail of 'BackendSchema'.
-- Some functions of 'BackendSchema' differ across different Postgres "kinds",
-- or call functions (such as those related to Relay) that have not been
-- generalized to all kinds of Postgres and still explicitly work on Vanilla
-- Postgres. This class allows each "kind" to specify its own specific
-- implementation. All common code is directly part of 'BackendSchema'.
class PostgresSchema (pgKind :: PostgresKind) where
  pgkBuildTableRelayQueryFields ::
    BS.MonadBuildSchema ('Postgres pgKind) r m n =>
    SourceName ->
    SourceConfig ('Postgres pgKind) ->
    Maybe QueryTagsConfig ->
    TableName ('Postgres pgKind) ->
    TableInfo ('Postgres pgKind) ->
    G.Name ->
    NESeq (ColumnInfo ('Postgres pgKind)) ->
    SelPermInfo ('Postgres pgKind) ->
    m [FieldParser n (QueryRootField UnpreparedValue)]
  pgkBuildFunctionRelayQueryFields ::
    BS.MonadBuildSchema ('Postgres pgKind) r m n =>
    SourceName ->
    SourceConfig ('Postgres pgKind) ->
    Maybe QueryTagsConfig ->
    FunctionName ('Postgres pgKind) ->
    FunctionInfo ('Postgres pgKind) ->
    TableName ('Postgres pgKind) ->
    NESeq (ColumnInfo ('Postgres pgKind)) ->
    SelPermInfo ('Postgres pgKind) ->
    m [FieldParser n (QueryRootField UnpreparedValue)]
  pgkRelayExtension ::
    Maybe (XRelay ('Postgres pgKind))
  pgkNode ::
    BS.MonadBuildSchema ('Postgres pgKind) r m n =>
    m
      ( Parser
          'Output
          n
          ( HashMap
              ( TableName ('Postgres pgKind)
              )
              ( SourceName,
                SourceConfig ('Postgres pgKind),
                SelPermInfo ('Postgres pgKind),
                PrimaryKeyColumns ('Postgres pgKind),
                AnnotatedFields ('Postgres pgKind)
              )
          )
      )

instance PostgresSchema 'Vanilla where
  pgkBuildTableRelayQueryFields = buildTableRelayQueryFields
  pgkBuildFunctionRelayQueryFields = buildFunctionRelayQueryFields
  pgkRelayExtension = Just ()
  pgkNode = nodePG

instance PostgresSchema 'Citus where
  pgkBuildTableRelayQueryFields _ _ _ _ _ _ _ _ = pure []
  pgkBuildFunctionRelayQueryFields _ _ _ _ _ _ _ _ = pure []
  pgkRelayExtension = Nothing
  pgkNode = undefined
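
-- Note: 'pgkNode' for Citus is left 'undefined' above. This appears to be
-- safe only because 'pgkRelayExtension' is 'Nothing' for Citus, so the Relay
-- @node@ field should never be built for this backend. This reading is
-- inferred from the code, not stated by it.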

-- postgres schema

instance
  ( Backend ('Postgres pgKind),
    PostgresSchema pgKind
  ) =>
  BackendSchema ('Postgres pgKind)
  where
  -- top level parsers
  buildTableQueryFields = GSB.buildTableQueryFields
  buildTableRelayQueryFields = pgkBuildTableRelayQueryFields
  buildTableInsertMutationFields = GSB.buildTableInsertMutationFields
  buildTableUpdateMutationFields = GSB.buildTableUpdateMutationFields updateOperators
  buildTableDeleteMutationFields = GSB.buildTableDeleteMutationFields
  buildFunctionQueryFields = GSB.buildFunctionQueryFields
  buildFunctionRelayQueryFields = pgkBuildFunctionRelayQueryFields
  buildFunctionMutationFields = GSB.buildFunctionMutationFields

  -- table components
  tableArguments = defaultTableArgs
  mkRelationshipParser = GSB.mkDefaultRelationshipParser ()

  -- backend extensions
  relayExtension = pgkRelayExtension @pgKind
  nodesAggExtension = Just ()

  -- individual components
  columnParser = columnParser
  conflictObject = GSB.defaultConflictObject ()
  jsonPathArg = jsonPathArg
  orderByOperators = orderByOperators
  comparisonExps = comparisonExps
  mkCountType = mkCountType
  aggregateOrderByCountType = PG.PGInteger
  computedField = computedFieldPG
  node = pgkNode

  -- SQL literals
  columnDefaultValue = const PG.columnDefaultValue

  -- Extra insert data
  getExtraInsertData = const ()

----------------------------------------------------------------
-- Top level parsers

buildTableRelayQueryFields ::
  forall pgKind m n r.
  MonadBuildSchema ('Postgres pgKind) r m n =>
  SourceName ->
  SourceConfig ('Postgres pgKind) ->
  Maybe QueryTagsConfig ->
  TableName ('Postgres pgKind) ->
  TableInfo ('Postgres pgKind) ->
  G.Name ->
  NESeq (ColumnInfo ('Postgres pgKind)) ->
  SelPermInfo ('Postgres pgKind) ->
  m [FieldParser n (QueryRootField UnpreparedValue)]
buildTableRelayQueryFields sourceName sourceInfo queryTagsConfig tableName tableInfo gqlName pkeyColumns selPerms = do
  let mkRF =
        RFDB sourceName
          . AB.mkAnyBackend
          . SourceConfigWith sourceInfo queryTagsConfig
          . QDBR
      fieldDesc = Just $ G.Description $ "fetch data from the table: " <>> tableName
  fieldName <- mkRootFieldName $ gqlName <> $$(G.litName "_connection")
  fmap afold $
    optionalFieldParser (mkRF . QDBConnection) $
      selectTableConnection sourceName tableInfo fieldName fieldDesc pkeyColumns selPerms
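
-- For illustration: a hypothetical table exposed under the GraphQL name
-- @article@ gets a Relay root field named @article_connection@ here (the
-- "_connection" suffix appended above, possibly further renamed by
-- 'mkRootFieldName').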

buildFunctionRelayQueryFields ::
  forall pgKind m n r.
  MonadBuildSchema ('Postgres pgKind) r m n =>
  SourceName ->
  SourceConfig ('Postgres pgKind) ->
  Maybe QueryTagsConfig ->
  FunctionName ('Postgres pgKind) ->
  FunctionInfo ('Postgres pgKind) ->
  TableName ('Postgres pgKind) ->
  NESeq (ColumnInfo ('Postgres pgKind)) ->
  SelPermInfo ('Postgres pgKind) ->
  m [FieldParser n (QueryRootField UnpreparedValue)]
buildFunctionRelayQueryFields sourceName sourceInfo queryTagsConfig functionName functionInfo tableName pkeyColumns selPerms = do
  let mkRF =
        RFDB sourceName
          . AB.mkAnyBackend
          . SourceConfigWith sourceInfo queryTagsConfig
          . QDBR
      fieldDesc = Just $ G.Description $ "execute function " <> functionName <<> " which returns " <>> tableName
  fmap afold $
    optionalFieldParser (mkRF . QDBConnection) $
      selectFunctionConnection sourceName functionInfo fieldDesc pkeyColumns selPerms

----------------------------------------------------------------
-- Individual components

columnParser ::
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has P.MkTypename r) =>
  ColumnType ('Postgres pgKind) ->
  G.Nullability ->
  m (Parser 'Both n (ValueWithOrigin (ColumnValue ('Postgres pgKind))))
columnParser columnType (G.Nullability isNullable) =
  -- TODO(PDV): It might be worth memoizing this function even though it isn't
  -- recursive, simply for performance reasons, since it's likely to be hammered
  -- during schema generation. Need to profile to see whether or not it's a win.
  peelWithOrigin . fmap (ColumnValue columnType) <$> case columnType of
    ColumnScalar scalarType ->
      possiblyNullable scalarType <$> do
        -- We convert the value to JSON and use the FromJSON instance. This avoids
        -- having two separate ways of parsing a value in the codebase, which
        -- could lead to inconsistencies.
        --
        -- The mapping from postgres type to GraphQL scalar name is done by
        -- 'mkScalarTypeName'. This is confusing, and we might want to fix it
        -- later, as we will parse values differently here than how they'd be
        -- parsed in other places using the same scalar name; for instance, we
        -- will accept strings for postgres columns of type "Integer", despite the
        -- fact that they will be represented as GraphQL ints, which otherwise do
        -- not accept strings.
        --
        -- TODO: introduce new dedicated scalars for Postgres column types.
        name <- P.Typename <$> mkScalarTypeName scalarType
        let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
        pure $
          Parser
            { pType = schemaType,
              pParser =
                valueToJSON (P.toGraphQLType schemaType) >=> \case
                  J.Null -> parseError $ "unexpected null value for type " <>> name
                  value ->
                    runAesonParser (parsePGValue scalarType) value
                      `onLeft` (parseErrorWith ParseFailed . qeError)
            }
    ColumnEnumReference (EnumReference tableName enumValues) ->
      case nonEmpty (Map.toList enumValues) of
        Just enumValuesList -> do
          name <- qualifiedObjectToName tableName <&> (<> $$(G.litName "_enum")) >>= P.mkTypename
          pure $ possiblyNullable PGText $ P.enum name Nothing (mkEnumValue <$> enumValuesList)
        Nothing -> throw400 ValidationFailed "empty enum values"
  where
    possiblyNullable scalarType
      | isNullable = fmap (fromMaybe $ PGNull scalarType) . P.nullable
      | otherwise = id
    mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, PGScalarValue)
    mkEnumValue (EnumValue value, EnumValueInfo description) =
      ( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo,
        PGValText $ G.unName value
      )
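
-- For illustration: a hypothetical enum table tracked as @public.colors@
-- would, per the code above, produce a GraphQL enum type named @colors_enum@
-- (the "_enum" suffix appended to the table's GraphQL name), whose values are
-- sent to Postgres as text literals ('PGValText').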

jsonPathArg ::
  MonadParse n =>
  ColumnType ('Postgres pgKind) ->
  InputFieldsParser n (Maybe (IR.ColumnOp ('Postgres pgKind)))
jsonPathArg columnType
  | isScalarColumnWhere PG.isJSONType columnType =
    P.fieldOptional fieldName description P.string `P.bindFields` fmap join . traverse toColExp
  | otherwise = pure Nothing
  where
    fieldName = $$(G.litName "path")
    description = Just "JSON select path"
    toColExp textValue = case parseJSONPath textValue of
      Left err -> parseError $ T.pack $ "parse json path error: " ++ err
      Right [] -> pure Nothing
      Right jPaths -> pure $ Just $ IR.ColumnOp PG.jsonbPathOp $ PG.SEArray $ map elToColExp jPaths
    elToColExp (Key k) = PG.SELit k
    elToColExp (Index i) = PG.SELit $ tshow i
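
-- For illustration: a field argument such as @path: "$.a[0].b"@ parses into
-- the elements @a@, @0@, @b@, which are applied to the column via
-- 'PG.jsonbPathOp', i.e. roughly @column #> '{a,0,b}'@ in the generated SQL.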

orderByOperators ::
  NonEmpty (Definition P.EnumValueInfo, (BasicOrderType ('Postgres pgKind), NullsOrderType ('Postgres pgKind)))
orderByOperators =
  NE.fromList
    [ ( define $$(G.litName "asc") "in ascending order, nulls last",
        (PG.OTAsc, PG.NLast)
      ),
      ( define $$(G.litName "asc_nulls_first") "in ascending order, nulls first",
        (PG.OTAsc, PG.NFirst)
      ),
      ( define $$(G.litName "asc_nulls_last") "in ascending order, nulls last",
        (PG.OTAsc, PG.NLast)
      ),
      ( define $$(G.litName "desc") "in descending order, nulls first",
        (PG.OTDesc, PG.NFirst)
      ),
      ( define $$(G.litName "desc_nulls_first") "in descending order, nulls first",
        (PG.OTDesc, PG.NFirst)
      ),
      ( define $$(G.litName "desc_nulls_last") "in descending order, nulls last",
        (PG.OTDesc, PG.NLast)
      )
    ]
  where
    define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo
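
-- For illustration: these enum values surface in queries as, e.g.,
-- @articles(order_by: {published_at: desc_nulls_last})@ for a hypothetical
-- @articles@ table. The pairs above mirror Postgres' defaults: plain @asc@
-- implies nulls last, plain @desc@ implies nulls first.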

comparisonExps ::
  forall pgKind m n r.
  ( BackendSchema ('Postgres pgKind),
    MonadSchema n m,
    MonadError QErr m,
    MonadReader r m,
    Has QueryContext r,
    Has MkTypename r
  ) =>
  ColumnType ('Postgres pgKind) ->
  m (Parser 'Input n [ComparisonExp ('Postgres pgKind)])
comparisonExps = P.memoize 'comparisonExps \columnType -> do
  -- see Note [Columns in comparison expression are never nullable]
  collapseIfNull <- asks $ qcDangerousBooleanCollapse . getter

  -- parsers used for comparison arguments
  geogInputParser <- geographyWithinDistanceInput
  geomInputParser <- geometryWithinDistanceInput
  ignInputParser <- intersectsGeomNbandInput
  ingInputParser <- intersectsNbandGeomInput
  typedParser <- columnParser columnType (G.Nullability False)
  nullableTextParser <- columnParser (ColumnScalar PGText) (G.Nullability True)
  textParser <- columnParser (ColumnScalar PGText) (G.Nullability False)
  -- `lquery` represents a regular-expression-like pattern for matching `ltree` values.
  lqueryParser <- columnParser (ColumnScalar PGLquery) (G.Nullability False)
  -- `ltxtquery` represents a full-text-search-like pattern for matching `ltree` values.
  ltxtqueryParser <- columnParser (ColumnScalar PGLtxtquery) (G.Nullability False)
  maybeCastParser <- castExp columnType
  let name = P.Typename $ P.getName typedParser <> $$(G.litName "_comparison_exp")
      desc =
        G.Description $
          "Boolean expression to compare columns of type "
            <> P.getName typedParser
            <<> ". All fields are combined with logical 'AND'."
      textListParser = fmap openValueOrigin <$> P.list textParser
      columnListParser = fmap openValueOrigin <$> P.list typedParser
  pure $
    P.object name (Just desc) $
      fmap catMaybes $
        sequenceA $
          concat
            [ flip (maybe []) maybeCastParser $ \castParser ->
                [ P.fieldOptional $$(G.litName "_cast") Nothing (ACast <$> castParser)
                ],
              -- Common ops for all types
              equalityOperators
                collapseIfNull
                (mkParameter <$> typedParser)
                (mkListLiteral columnType <$> columnListParser),
              -- Comparison ops for non-Raster types
              guard (isScalarColumnWhere (/= PGRaster) columnType)
                *> comparisonOperators
                  collapseIfNull
                  (mkParameter <$> typedParser),
              -- Ops for Raster types
              guard (isScalarColumnWhere (== PGRaster) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects_rast")
                       Nothing
                       (ABackendSpecific . ASTIntersectsRast . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects_nband_geom")
                       Nothing
                       (ABackendSpecific . ASTIntersectsNbandGeom <$> ingInputParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects_geom_nband")
                       Nothing
                       (ABackendSpecific . ASTIntersectsGeomNband <$> ignInputParser)
                   ],
              -- Ops for String like types
              guard (isScalarColumnWhere isStringType columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_like")
                       (Just "does the column match the given pattern")
                       (ALIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nlike")
                       (Just "does the column NOT match the given pattern")
                       (ANLIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_ilike")
                       (Just "does the column match the given case-insensitive pattern")
                       (ABackendSpecific . AILIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nilike")
                       (Just "does the column NOT match the given case-insensitive pattern")
                       (ABackendSpecific . ANILIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_similar")
                       (Just "does the column match the given SQL regular expression")
                       (ABackendSpecific . ASIMILAR . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nsimilar")
                       (Just "does the column NOT match the given SQL regular expression")
                       (ABackendSpecific . ANSIMILAR . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_regex")
                       (Just "does the column match the given POSIX regular expression, case sensitive")
                       (ABackendSpecific . AREGEX . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_iregex")
                       (Just "does the column match the given POSIX regular expression, case insensitive")
                       (ABackendSpecific . AIREGEX . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nregex")
                       (Just "does the column NOT match the given POSIX regular expression, case sensitive")
                       (ABackendSpecific . ANREGEX . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_niregex")
                       (Just "does the column NOT match the given POSIX regular expression, case insensitive")
                       (ABackendSpecific . ANIREGEX . mkParameter <$> typedParser)
                   ],
              -- Ops for JSONB type
              guard (isScalarColumnWhere (== PGJSONB) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_contains")
                       (Just "does the column contain the given json value at the top level")
                       (ABackendSpecific . AContains . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_contained_in")
                       (Just "is the column contained in the given json value")
                       (ABackendSpecific . AContainedIn . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_has_key")
                       (Just "does the string exist as a top-level key in the column")
                       (ABackendSpecific . AHasKey . mkParameter <$> nullableTextParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_has_keys_any")
                       (Just "do any of these strings exist as top-level keys in the column")
                       (ABackendSpecific . AHasKeysAny . mkListLiteral (ColumnScalar PGText) <$> textListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_has_keys_all")
                       (Just "do all of these strings exist as top-level keys in the column")
                       (ABackendSpecific . AHasKeysAll . mkListLiteral (ColumnScalar PGText) <$> textListParser)
                   ],
              -- Ops for Geography type
              guard (isScalarColumnWhere (== PGGeography) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects")
                       (Just "does the column spatially intersect the given geography value")
                       (ABackendSpecific . ASTIntersects . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_d_within")
                       (Just "is the column within a given distance from the given geography value")
                       (ABackendSpecific . ASTDWithinGeog <$> geogInputParser)
                   ],
              -- Ops for Geometry type
              guard (isScalarColumnWhere (== PGGeometry) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_contains")
                       (Just "does the column contain the given geometry value")
                       (ABackendSpecific . ASTContains . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_crosses")
                       (Just "does the column cross the given geometry value")
                       (ABackendSpecific . ASTCrosses . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_equals")
                       (Just "is the column equal to given geometry value (directionality is ignored)")
                       (ABackendSpecific . ASTEquals . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_overlaps")
                       (Just "does the column 'spatially overlap' (intersect but not completely contain) the given geometry value")
                       (ABackendSpecific . ASTOverlaps . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_touches")
                       (Just "does the column have at least one point in common with the given geometry value")
                       (ABackendSpecific . ASTTouches . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_within")
                       (Just "is the column contained in the given geometry value")
                       (ABackendSpecific . ASTWithin . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects")
                       (Just "does the column spatially intersect the given geometry value")
                       (ABackendSpecific . ASTIntersects . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_3d_intersects")
                       (Just "does the column spatially intersect the given geometry value in 3D")
                       (ABackendSpecific . AST3DIntersects . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_d_within")
                       (Just "is the column within a given distance from the given geometry value")
                       (ABackendSpecific . ASTDWithinGeom <$> geomInputParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_3d_d_within")
                       (Just "is the column within a given 3D distance from the given geometry value")
                       (ABackendSpecific . AST3DDWithinGeom <$> geomInputParser)
                   ],
              -- Ops for Ltree type
              guard (isScalarColumnWhere (== PGLtree) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_ancestor")
                       (Just "is the left argument an ancestor of right (or equal)?")
                       (ABackendSpecific . AAncestor . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_ancestor_any")
                       (Just "does array contain an ancestor of `ltree`?")
                       (ABackendSpecific . AAncestorAny . mkListLiteral columnType <$> columnListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_descendant")
                       (Just "is the left argument a descendant of right (or equal)?")
                       (ABackendSpecific . ADescendant . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_descendant_any")
                       (Just "does array contain a descendant of `ltree`?")
                       (ABackendSpecific . ADescendantAny . mkListLiteral columnType <$> columnListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_matches")
                       (Just "does `ltree` match `lquery`?")
                       (ABackendSpecific . AMatches . mkParameter <$> lqueryParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_matches_any")
                       (Just "does `ltree` match any `lquery` in array?")
                       (ABackendSpecific . AMatchesAny . mkListLiteral (ColumnScalar PGLquery) <$> textListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_matches_fulltext")
                       (Just "does `ltree` match `ltxtquery`?")
                       (ABackendSpecific . AMatchesFulltext . mkParameter <$> ltxtqueryParser)
                   ]
            ]
  where
    mkListLiteral :: ColumnType ('Postgres pgKind) -> [ColumnValue ('Postgres pgKind)] -> UnpreparedValue ('Postgres pgKind)
    mkListLiteral columnType columnValues =
      P.UVLiteral $
        SETyAnn
          (SEArray $ txtEncoder . cvValue <$> columnValues)
          (mkTypeAnn $ CollectableTypeArray $ unsafePGColumnToBackend columnType)

    castExp :: ColumnType ('Postgres pgKind) -> m (Maybe (Parser 'Input n (CastExp ('Postgres pgKind) (UnpreparedValue ('Postgres pgKind)))))
    castExp sourceType = do
      let maybeScalars = case sourceType of
            ColumnScalar PGGeography -> Just (PGGeography, PGGeometry)
            ColumnScalar PGGeometry -> Just (PGGeometry, PGGeography)
            _ -> Nothing

      forM maybeScalars $ \(sourceScalar, targetScalar) -> do
        sourceName <- mkScalarTypeName sourceScalar <&> (<> $$(G.litName "_cast_exp"))
        targetName <- mkScalarTypeName targetScalar
        targetOpExps <- comparisonExps $ ColumnScalar targetScalar
        let field = P.fieldOptional targetName Nothing $ (targetScalar,) <$> targetOpExps
        pure $ P.object (P.Typename sourceName) Nothing $ M.fromList . maybeToList <$> field
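
-- For illustration: 'castExp' above only generates geography <-> geometry
-- casts, so a @geometry@ column can be compared as geography with something
-- like @where: {location: {_cast: {geography: {_st_d_within: {...}}}}}@,
-- where @location@ is a hypothetical column name.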

geographyWithinDistanceInput ::
  forall pgKind m n r.
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
  m (Parser 'Input n (DWithinGeogOp (UnpreparedValue ('Postgres pgKind))))
geographyWithinDistanceInput = do
  geographyParser <- columnParser (ColumnScalar PGGeography) (G.Nullability False)
  -- FIXME
  -- It doesn't make sense for this value to be nullable; it is only so for
  -- backwards compatibility. If an explicit null value is given, it will be
  -- forwarded to the underlying SQL function, which in turn treats a null
  -- value as an error. We can fix this by rejecting explicit null values,
  -- i.e. by marking this field non-nullable, in a future release.
  booleanParser <- columnParser (ColumnScalar PGBoolean) (G.Nullability True)
  floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
  pure $
    P.object (P.Typename $$(G.litName "st_d_within_geography_input")) Nothing $
      DWithinGeogOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
        <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geographyParser)
        <*> (mkParameter <$> P.fieldWithDefault $$(G.litName "use_spheroid") Nothing (G.VBoolean True) booleanParser)

geometryWithinDistanceInput ::
  forall pgKind m n r.
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
  m (Parser 'Input n (DWithinGeomOp (UnpreparedValue ('Postgres pgKind))))
geometryWithinDistanceInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
  pure $
    P.object (P.Typename $$(G.litName "st_d_within_input")) Nothing $
      DWithinGeomOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
        <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geometryParser)

intersectsNbandGeomInput ::
  forall pgKind m n r.
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
  m (Parser 'Input n (STIntersectsNbandGeommin (UnpreparedValue ('Postgres pgKind))))
intersectsNbandGeomInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
  pure $
    P.object (P.Typename $$(G.litName "st_intersects_nband_geom_input")) Nothing $
      STIntersectsNbandGeommin <$> (mkParameter <$> P.field $$(G.litName "nband") Nothing integerParser)
        <*> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)

intersectsGeomNbandInput ::
  forall pgKind m n r.
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
  m (Parser 'Input n (STIntersectsGeomminNband (UnpreparedValue ('Postgres pgKind))))
intersectsGeomNbandInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
  pure $
    P.object (P.Typename $$(G.litName "st_intersects_geom_nband_input")) Nothing $
      STIntersectsGeomminNband
        <$> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
        <*> (fmap mkParameter <$> P.fieldOptional $$(G.litName "nband") Nothing integerParser)

mkCountType :: Maybe Bool -> Maybe [Column ('Postgres pgKind)] -> CountType ('Postgres pgKind)
mkCountType _ Nothing = PG.CTStar
mkCountType (Just True) (Just cols) = PG.CTDistinct cols
mkCountType _ (Just cols) = PG.CTSimple cols
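
-- For illustration, reading off the equations above:
--   mkCountType (Just True) (Just cols) == PG.CTDistinct cols  -- count with distinct: true
--   mkCountType Nothing     (Just cols) == PG.CTSimple cols    -- count over the given columns
--   mkCountType (Just True) Nothing     == PG.CTStar           -- distinct without columns falls back to count(*)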

-- | Various update operators
updateOperators ::
  forall pgKind m n r.
  (BackendSchema ('Postgres pgKind), MonadSchema n m, MonadTableInfo r m, Has MkTypename r) =>
  -- | table info
  TableInfo ('Postgres pgKind) ->
  -- | update permissions of the table
  UpdPermInfo ('Postgres pgKind) ->
  m (InputFieldsParser n [(Column ('Postgres pgKind), IR.UpdOpExpG (UnpreparedValue ('Postgres pgKind)))])
updateOperators tableInfo updatePermissions = do
  tableGQLName <- getTableGQLName tableInfo
  columns <- tableUpdateColumns tableInfo updatePermissions
  let numericCols = onlyNumCols columns
      jsonCols = onlyJSONBCols columns
  parsers <-
    catMaybes
      <$> sequenceA
        [ updateOperator
            tableGQLName
            $$(G.litName "_set")
            typedParser
            IR.UpdSet
            columns
            "sets the columns of the filtered rows to the given values"
            (G.Description $ "input type for updating data in table " <>> tableName),
          updateOperator
            tableGQLName
            $$(G.litName "_inc")
            typedParser
            IR.UpdInc
            numericCols
            "increments the numeric columns with given value of the filtered values"
            (G.Description $ "input type for incrementing numeric columns in table " <>> tableName),
          let desc = "prepend existing jsonb value of filtered columns with new jsonb value"
           in updateOperator
                tableGQLName
                $$(G.litName "_prepend")
                typedParser
                IR.UpdPrepend
                jsonCols
                desc
                desc,
          let desc = "append existing jsonb value of filtered columns with new jsonb value"
           in updateOperator
                tableGQLName
                $$(G.litName "_append")
                typedParser
                IR.UpdAppend
                jsonCols
                desc
                desc,
          let desc = "delete key/value pair or string element. key/value pairs are matched based on their key value"
           in updateOperator
                tableGQLName
                $$(G.litName "_delete_key")
                nullableTextParser
                IR.UpdDeleteKey
                jsonCols
                desc
                desc,
          let desc =
                "delete the array element with specified index (negative integers count from the end). "
                  <> "throws an error if top level container is not an array"
           in updateOperator
                tableGQLName
                $$(G.litName "_delete_elem")
                nonNullableIntParser
                IR.UpdDeleteElem
                jsonCols
                desc
                desc,
          let desc = "delete the field or element with specified path (for JSON arrays, negative integers count from the end)"
           in updateOperator
                tableGQLName
                $$(G.litName "_delete_at_path")
                (fmap P.list . nonNullableTextParser)
                IR.UpdDeleteAtPath
                jsonCols
                desc
                desc
        ]
  let allowedOperators = fst <$> parsers
  pure $
    fmap catMaybes (sequenceA $ snd <$> parsers)
      `P.bindFields` \opExps -> do
        -- there needs to be at least one operator in the update, even if it is empty
        let presetColumns = Map.toList $ IR.UpdSet . partialSQLExpToUnpreparedValue <$> upiSet updatePermissions
        when (null opExps && null presetColumns) $
          parseError $
            "at least any one of " <> commaSeparated allowedOperators <> " is expected"
        -- no column should appear twice
        let flattenedExps = concat opExps
            erroneousExps = OMap.filter ((> 1) . length) $ OMap.groupTuples flattenedExps
        unless (OMap.null erroneousExps) $
          parseError $
            "column found in multiple operators; "
              <> T.intercalate
                ". "
                [ dquote columnName <> " in " <> commaSeparated (IR.updateOperatorText <$> ops)
                  | (columnName, ops) <- OMap.toList erroneousExps
                ]
        pure $ presetColumns <> flattenedExps
  where
    tableName = tableInfoName tableInfo
    typedParser columnInfo = fmap P.mkParameter <$> columnParser (pgiType columnInfo) (G.Nullability $ pgiIsNullable columnInfo)
    nonNullableTextParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGText) (G.Nullability False)
    nullableTextParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGText) (G.Nullability True)
    nonNullableIntParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGInteger) (G.Nullability False)
    onlyJSONBCols = filter (isScalarColumnWhere (== PGJSONB) . pgiType)

    updateOperator ::
      G.Name ->
      G.Name ->
      (ColumnInfo b -> m (Parser 'Both n a)) ->
      (a -> IR.UpdOpExpG (UnpreparedValue b)) ->
      [ColumnInfo b] ->
      G.Description ->
      G.Description ->
      m (Maybe (Text, InputFieldsParser n (Maybe [(Column b, IR.UpdOpExpG (UnpreparedValue b))])))
    updateOperator tableGQLName opName mkParser updOpExp columns opDesc objDesc =
      whenMaybe (not $ null columns) do
        fields <- for columns \columnInfo -> do
          let fieldName = pgiName columnInfo
              fieldDesc = pgiDescription columnInfo
          fieldParser <- mkParser columnInfo
          pure $
            P.fieldOptional fieldName fieldDesc fieldParser
              `mapField` \value -> (pgiColumn columnInfo, updOpExp value)
        objName <- P.mkTypename $ tableGQLName <> opName <> $$(G.litName "_input")
        pure $
          (G.unName opName,) $
            P.fieldOptional opName (Just opDesc) $
              P.object objName (Just objDesc) $
                catMaybes <$> sequenceA fields
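
-- For illustration: for a hypothetical table exposed as @article@, the
-- parsers above yield mutation arguments such as
-- @update_article(where: {...}, _set: {title: "..."}, _inc: {views: 1})@.
-- The jsonb operators (_prepend, _append, _delete_key, _delete_elem,
-- _delete_at_path) are only generated when the table has updatable jsonb
-- columns, since 'whenMaybe' drops any operator whose column list is empty.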