{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.Postgres.Instances.Schema
  (
  )
where

import Data.Aeson qualified as J
import Data.Has
import Data.HashMap.Strict qualified as Map
import Data.HashMap.Strict.Extended qualified as M
import Data.List.NonEmpty qualified as NE
import Data.Parser.JSONPath
import Data.Text qualified as T
import Data.Text.Extended
import Hasura.Backends.Postgres.SQL.DML as PG hiding (CountType, incOp)
import Hasura.Backends.Postgres.SQL.Types as PG hiding (FunctionName, TableName)
import Hasura.Backends.Postgres.SQL.Value as PG
import Hasura.Backends.Postgres.Types.BoolExp
import Hasura.Backends.Postgres.Types.Column
import Hasura.Backends.Postgres.Types.Update as PGIR
import Hasura.Base.Error
import Hasura.GraphQL.Parser hiding (EnumValueInfo, field)
import Hasura.GraphQL.Parser qualified as P
import Hasura.GraphQL.Parser.Internal.Parser hiding (field)
import Hasura.GraphQL.Schema.Backend
  ( BackendSchema,
    ComparisonExp,
    MonadBuildSchema,
  )
import Hasura.GraphQL.Schema.Backend qualified as BS
import Hasura.GraphQL.Schema.BoolExp
import Hasura.GraphQL.Schema.Build qualified as GSB
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Mutation qualified as GSB
import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Schema.Update qualified as SU
import Hasura.Prelude
import Hasura.RQL.IR
import Hasura.RQL.IR.Select qualified as IR
import Hasura.RQL.Types
import Hasura.SQL.Types
import Language.GraphQL.Draft.Syntax qualified as G

----------------------------------------------------------------
-- BackendSchema instance

-- | This class is an implementation detail of 'BackendSchema'.
--
-- Some functions of 'BackendSchema' differ across different Postgres "kinds",
-- or call functions (such as those related to Relay) that have not been
-- generalized to all kinds of Postgres and still explicitly work on vanilla
-- Postgres. This class allows each "kind" to specify its own specific
-- implementation. All common code is directly part of 'BackendSchema'.
--
-- Note: users shouldn't ever put this as a constraint. Use
-- @BackendSchema ('Postgres pgKind)@ instead.
class PostgresSchema (pgKind :: PostgresKind) where
  pgkBuildTableRelayQueryFields ::
    BS.MonadBuildSchema ('Postgres pgKind) r m n =>
    SourceName ->
    TableName ('Postgres pgKind) ->
    TableInfo ('Postgres pgKind) ->
    G.Name ->
    NESeq (ColumnInfo ('Postgres pgKind)) ->
    SelPermInfo ('Postgres pgKind) ->
    m [FieldParser n (QueryDB ('Postgres pgKind) (RemoteRelationshipField UnpreparedValue) (UnpreparedValue ('Postgres pgKind)))]
  pgkBuildFunctionRelayQueryFields ::
    BS.MonadBuildSchema ('Postgres pgKind) r m n =>
    SourceName ->
    FunctionName ('Postgres pgKind) ->
    FunctionInfo ('Postgres pgKind) ->
    TableName ('Postgres pgKind) ->
    NESeq (ColumnInfo ('Postgres pgKind)) ->
    SelPermInfo ('Postgres pgKind) ->
    m [FieldParser n (QueryDB ('Postgres pgKind) (RemoteRelationshipField UnpreparedValue) (UnpreparedValue ('Postgres pgKind)))]
  pgkRelayExtension ::
    Maybe (XRelay ('Postgres pgKind))
  pgkNode ::
    BS.MonadBuildSchema ('Postgres pgKind) r m n =>
    m
      ( Parser
          'Output
          n
          ( HashMap
              ( TableName ('Postgres pgKind)
              )
              ( SourceName,
                SourceConfig ('Postgres pgKind),
                SelPermInfo ('Postgres pgKind),
                PrimaryKeyColumns ('Postgres pgKind),
                AnnotatedFields ('Postgres pgKind)
              )
          )
      )

instance PostgresSchema 'Vanilla where
  pgkBuildTableRelayQueryFields = buildTableRelayQueryFields
  pgkBuildFunctionRelayQueryFields = buildFunctionRelayQueryFields
  pgkRelayExtension = Just ()
  pgkNode = nodePG

instance PostgresSchema 'Citus where
  pgkBuildTableRelayQueryFields _ _ _ _ _ _ = pure []
  pgkBuildFunctionRelayQueryFields _ _ _ _ _ _ = pure []
  pgkRelayExtension = Nothing
  pgkNode = undefined
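
-- Note on the 'Citus' instance above: Relay is not supported for Citus, so the
-- relay field builders return empty lists and 'pgkRelayExtension' is 'Nothing';
-- 'pgkNode' should therefore never be forced, which is why it is left as
-- 'undefined'.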

-- postgres schema

instance
  ( Backend ('Postgres pgKind),
    PostgresSchema pgKind
  ) =>
  BackendSchema ('Postgres pgKind)
  where
  -- top level parsers
  buildTableQueryFields = GSB.buildTableQueryFields
  buildTableRelayQueryFields = pgkBuildTableRelayQueryFields
  buildTableInsertMutationFields = GSB.buildTableInsertMutationFields
  buildTableUpdateMutationFields = GSB.buildTableUpdateMutationFields (\ti updP -> fmap BackendUpdate <$> updateOperators ti updP) -- TODO: https://github.com/hasura/graphql-engine-mono/issues/2955
  buildTableDeleteMutationFields = GSB.buildTableDeleteMutationFields
  buildFunctionQueryFields = GSB.buildFunctionQueryFields
  buildFunctionRelayQueryFields = pgkBuildFunctionRelayQueryFields
  buildFunctionMutationFields = GSB.buildFunctionMutationFields

  -- table components
  tableArguments = defaultTableArgs
  mkRelationshipParser = GSB.mkDefaultRelationshipParser ()

  -- backend extensions
  relayExtension = pgkRelayExtension @pgKind
  nodesAggExtension = Just ()

  -- individual components
  columnParser = columnParser
  conflictObject = GSB.defaultConflictObject ()
  jsonPathArg = jsonPathArg
  orderByOperators = orderByOperators
  comparisonExps = comparisonExps
  mkCountType = mkCountType
  aggregateOrderByCountType = PG.PGInteger
  computedField = computedFieldPG
  node = pgkNode

  -- SQL literals
  columnDefaultValue = const PG.columnDefaultValue

  -- Extra insert data
  getExtraInsertData = const ()

----------------------------------------------------------------
-- Top level parsers

buildTableRelayQueryFields ::
  forall pgKind m n r.
  MonadBuildSchema ('Postgres pgKind) r m n =>
  SourceName ->
  TableName ('Postgres pgKind) ->
  TableInfo ('Postgres pgKind) ->
  G.Name ->
  NESeq (ColumnInfo ('Postgres pgKind)) ->
  SelPermInfo ('Postgres pgKind) ->
  m [FieldParser n (QueryDB ('Postgres pgKind) (RemoteRelationshipField UnpreparedValue) (UnpreparedValue ('Postgres pgKind)))]
buildTableRelayQueryFields sourceName tableName tableInfo gqlName pkeyColumns selPerms = do
  let fieldDesc = Just $ G.Description $ "fetch data from the table: " <>> tableName
  fieldName <- mkRootFieldName $ gqlName <> $$(G.litName "_connection")
  fmap afold $
    optionalFieldParser QDBConnection $
      selectTableConnection sourceName tableInfo fieldName fieldDesc pkeyColumns selPerms
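
-- For example, a table exposed under the GraphQL name @article@ gets a Relay
-- connection root field named @article_connection@ (subject to any root field
-- name customization applied by 'mkRootFieldName'). A typical query against it
-- would look like (sketch; available node fields depend on permissions):
--
-- > query {
-- >   article_connection(first: 10) {
-- >     pageInfo { hasNextPage endCursor }
-- >     edges { cursor node { id } }
-- >   }
-- > }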

buildFunctionRelayQueryFields ::
  forall pgKind m n r.
  MonadBuildSchema ('Postgres pgKind) r m n =>
  SourceName ->
  FunctionName ('Postgres pgKind) ->
  FunctionInfo ('Postgres pgKind) ->
  TableName ('Postgres pgKind) ->
  NESeq (ColumnInfo ('Postgres pgKind)) ->
  SelPermInfo ('Postgres pgKind) ->
  m [FieldParser n (QueryDB ('Postgres pgKind) (RemoteRelationshipField UnpreparedValue) (UnpreparedValue ('Postgres pgKind)))]
buildFunctionRelayQueryFields sourceName functionName functionInfo tableName pkeyColumns selPerms = do
  let fieldDesc = Just $ G.Description $ "execute function " <> functionName <<> " which returns " <>> tableName
  fmap afold $
    optionalFieldParser QDBConnection $
      selectFunctionConnection sourceName functionInfo fieldDesc pkeyColumns selPerms

----------------------------------------------------------------
-- Individual components

columnParser ::
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has P.MkTypename r) =>
  ColumnType ('Postgres pgKind) ->
  G.Nullability ->
  m (Parser 'Both n (ValueWithOrigin (ColumnValue ('Postgres pgKind))))
columnParser columnType (G.Nullability isNullable) =
  -- TODO(PDV): It might be worth memoizing this function even though it isn’t
  -- recursive simply for performance reasons, since it’s likely to be hammered
  -- during schema generation. Need to profile to see whether or not it’s a win.
  peelWithOrigin . fmap (ColumnValue columnType) <$> case columnType of
    ColumnScalar scalarType ->
      possiblyNullable scalarType <$> do
        -- We convert the value to JSON and use the FromJSON instance. This avoids
        -- having two separate ways of parsing a value in the codebase, which
        -- could lead to inconsistencies.
        --
        -- The mapping from postgres type to GraphQL scalar name is done by
        -- 'mkScalarTypeName'. This is confusing, and we might want to fix it
        -- later, as we will parse values differently here than how they'd be
        -- parsed in other places using the same scalar name; for instance, we
        -- will accept strings for postgres columns of type "Integer", despite the
        -- fact that they will be represented as GraphQL ints, which otherwise do
        -- not accept strings.
        --
        -- TODO: introduce new dedicated scalars for Postgres column types.
        name <- mkScalarTypeName scalarType
        let schemaType = P.NonNullable $ P.TNamed $ P.Definition name Nothing P.TIScalar
        pure $
          Parser
            { pType = schemaType,
              pParser =
                valueToJSON (P.toGraphQLType schemaType) >=> \case
                  J.Null -> parseError $ "unexpected null value for type " <>> name
                  value ->
                    runAesonParser (parsePGValue scalarType) value
                      `onLeft` (parseErrorWith ParseFailed . qeError)
            }
    ColumnEnumReference (EnumReference tableName enumValues) ->
      case nonEmpty (Map.toList enumValues) of
        Just enumValuesList -> do
          name <- qualifiedObjectToName tableName <&> (<> $$(G.litName "_enum")) >>= P.mkTypename
          pure $ possiblyNullable PGText $ P.enum name Nothing (mkEnumValue <$> enumValuesList)
        Nothing -> throw400 ValidationFailed "empty enum values"
  where
    possiblyNullable scalarType
      | isNullable = fmap (fromMaybe $ PGNull scalarType) . P.nullable
      | otherwise = id
    mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, PGScalarValue)
    mkEnumValue (EnumValue value, EnumValueInfo description) =
      ( P.Definition value (G.Description <$> description) P.EnumValueInfo,
        PGValText $ G.unName value
      )
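
-- For instance, an enum-reference column backed by an enum table
-- @public.colors@ is exposed as a GraphQL enum type named @colors_enum@
-- (modulo any type name customization applied via 'P.mkTypename'), with one
-- enum value per row of the enum table.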

jsonPathArg ::
  MonadParse n =>
  ColumnType ('Postgres pgKind) ->
  InputFieldsParser n (Maybe (IR.ColumnOp ('Postgres pgKind)))
jsonPathArg columnType
  | isScalarColumnWhere PG.isJSONType columnType =
    P.fieldOptional fieldName description P.string `P.bindFields` fmap join . traverse toColExp
  | otherwise = pure Nothing
  where
    fieldName = $$(G.litName "path")
    description = Just "JSON select path"
    toColExp textValue = case parseJSONPath textValue of
      Left err -> parseError $ T.pack $ "parse json path error: " ++ err
      Right [] -> pure Nothing
      Right jPaths -> pure $ Just $ IR.ColumnOp PG.jsonbPathOp $ PG.SEArray $ map elToColExp jPaths
    elToColExp (Key k) = PG.SELit k
    elToColExp (Index i) = PG.SELit $ tshow i
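
-- For example, a @path@ argument that 'parseJSONPath' turns into
-- @[Key "a", Index 0]@ produces
-- @IR.ColumnOp PG.jsonbPathOp (PG.SEArray [PG.SELit "a", PG.SELit "0"])@.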

orderByOperators ::
  NonEmpty (Definition P.EnumValueInfo, (BasicOrderType ('Postgres pgKind), NullsOrderType ('Postgres pgKind)))
orderByOperators =
  NE.fromList
    [ ( define $$(G.litName "asc") "in ascending order, nulls last",
        (PG.OTAsc, PG.NLast)
      ),
      ( define $$(G.litName "asc_nulls_first") "in ascending order, nulls first",
        (PG.OTAsc, PG.NFirst)
      ),
      ( define $$(G.litName "asc_nulls_last") "in ascending order, nulls last",
        (PG.OTAsc, PG.NLast)
      ),
      ( define $$(G.litName "desc") "in descending order, nulls first",
        (PG.OTDesc, PG.NFirst)
      ),
      ( define $$(G.litName "desc_nulls_first") "in descending order, nulls first",
        (PG.OTDesc, PG.NFirst)
      ),
      ( define $$(G.litName "desc_nulls_last") "in descending order, nulls last",
        (PG.OTDesc, PG.NLast)
      )
    ]
  where
    define name desc = P.Definition name (Just desc) P.EnumValueInfo

comparisonExps ::
  forall pgKind m n r.
  ( BackendSchema ('Postgres pgKind),
    MonadSchema n m,
    MonadError QErr m,
    MonadReader r m,
    Has QueryContext r,
    Has MkTypename r
  ) =>
  ColumnType ('Postgres pgKind) ->
  m (Parser 'Input n [ComparisonExp ('Postgres pgKind)])
comparisonExps = P.memoize 'comparisonExps \columnType -> do
  -- see Note [Columns in comparison expression are never nullable]
  collapseIfNull <- asks $ qcDangerousBooleanCollapse . getter

  -- parsers used for comparison arguments
  geogInputParser <- geographyWithinDistanceInput
  geomInputParser <- geometryWithinDistanceInput
  ignInputParser <- intersectsGeomNbandInput
  ingInputParser <- intersectsNbandGeomInput
  typedParser <- columnParser columnType (G.Nullability False)
  nullableTextParser <- columnParser (ColumnScalar PGText) (G.Nullability True)
  textParser <- columnParser (ColumnScalar PGText) (G.Nullability False)
  -- `lquery` represents a regular-expression-like pattern for matching `ltree` values.
  lqueryParser <- columnParser (ColumnScalar PGLquery) (G.Nullability False)
  -- `ltxtquery` represents a full-text-search-like pattern for matching `ltree` values.
  ltxtqueryParser <- columnParser (ColumnScalar PGLtxtquery) (G.Nullability False)
  maybeCastParser <- castExp columnType
  let name = P.getName typedParser <> $$(G.litName "_comparison_exp")
      desc =
        G.Description $
          "Boolean expression to compare columns of type "
            <> P.getName typedParser
            <<> ". All fields are combined with logical 'AND'."
      textListParser = fmap openValueOrigin <$> P.list textParser
      columnListParser = fmap openValueOrigin <$> P.list typedParser

  pure $
    P.object name (Just desc) $
      fmap catMaybes $
        sequenceA $
          concat
            [ flip (maybe []) maybeCastParser $ \castParser ->
                [ P.fieldOptional $$(G.litName "_cast") Nothing (ACast <$> castParser)
                ],
              -- Common ops for all types
              equalityOperators
                collapseIfNull
                (mkParameter <$> typedParser)
                (mkListLiteral columnType <$> columnListParser),
              -- Comparison ops for non Raster types
              guard (isScalarColumnWhere (/= PGRaster) columnType)
                *> comparisonOperators
                  collapseIfNull
                  (mkParameter <$> typedParser),
              -- Ops for Raster types
              guard (isScalarColumnWhere (== PGRaster) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects_rast")
                       Nothing
                       (ABackendSpecific . ASTIntersectsRast . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects_nband_geom")
                       Nothing
                       (ABackendSpecific . ASTIntersectsNbandGeom <$> ingInputParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects_geom_nband")
                       Nothing
                       (ABackendSpecific . ASTIntersectsGeomNband <$> ignInputParser)
                   ],
              -- Ops for String like types
              guard (isScalarColumnWhere isStringType columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_like")
                       (Just "does the column match the given pattern")
                       (ALIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nlike")
                       (Just "does the column NOT match the given pattern")
                       (ANLIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_ilike")
                       (Just "does the column match the given case-insensitive pattern")
                       (ABackendSpecific . AILIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nilike")
                       (Just "does the column NOT match the given case-insensitive pattern")
                       (ABackendSpecific . ANILIKE . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_similar")
                       (Just "does the column match the given SQL regular expression")
                       (ABackendSpecific . ASIMILAR . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nsimilar")
                       (Just "does the column NOT match the given SQL regular expression")
                       (ABackendSpecific . ANSIMILAR . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_regex")
                       (Just "does the column match the given POSIX regular expression, case sensitive")
                       (ABackendSpecific . AREGEX . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_iregex")
                       (Just "does the column match the given POSIX regular expression, case insensitive")
                       (ABackendSpecific . AIREGEX . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_nregex")
                       (Just "does the column NOT match the given POSIX regular expression, case sensitive")
                       (ABackendSpecific . ANREGEX . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_niregex")
                       (Just "does the column NOT match the given POSIX regular expression, case insensitive")
                       (ABackendSpecific . ANIREGEX . mkParameter <$> typedParser)
                   ],
              -- Ops for JSONB type
              guard (isScalarColumnWhere (== PGJSONB) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_contains")
                       (Just "does the column contain the given json value at the top level")
                       (ABackendSpecific . AContains . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_contained_in")
                       (Just "is the column contained in the given json value")
                       (ABackendSpecific . AContainedIn . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_has_key")
                       (Just "does the string exist as a top-level key in the column")
                       (ABackendSpecific . AHasKey . mkParameter <$> nullableTextParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_has_keys_any")
                       (Just "do any of these strings exist as top-level keys in the column")
                       (ABackendSpecific . AHasKeysAny . mkListLiteral (ColumnScalar PGText) <$> textListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_has_keys_all")
                       (Just "do all of these strings exist as top-level keys in the column")
                       (ABackendSpecific . AHasKeysAll . mkListLiteral (ColumnScalar PGText) <$> textListParser)
                   ],
              -- Ops for Geography type
              guard (isScalarColumnWhere (== PGGeography) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects")
                       (Just "does the column spatially intersect the given geography value")
                       (ABackendSpecific . ASTIntersects . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_d_within")
                       (Just "is the column within a given distance from the given geography value")
                       (ABackendSpecific . ASTDWithinGeog <$> geogInputParser)
                   ],
              -- Ops for Geometry type
              guard (isScalarColumnWhere (== PGGeometry) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_contains")
                       (Just "does the column contain the given geometry value")
                       (ABackendSpecific . ASTContains . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_crosses")
                       (Just "does the column cross the given geometry value")
                       (ABackendSpecific . ASTCrosses . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_equals")
                       (Just "is the column equal to given geometry value (directionality is ignored)")
                       (ABackendSpecific . ASTEquals . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_overlaps")
                       (Just "does the column 'spatially overlap' (intersect but not completely contain) the given geometry value")
                       (ABackendSpecific . ASTOverlaps . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_touches")
                       (Just "does the column have at least one point in common with the given geometry value")
                       (ABackendSpecific . ASTTouches . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_within")
                       (Just "is the column contained in the given geometry value")
                       (ABackendSpecific . ASTWithin . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_intersects")
                       (Just "does the column spatially intersect the given geometry value")
                       (ABackendSpecific . ASTIntersects . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_3d_intersects")
                       (Just "does the column spatially intersect the given geometry value in 3D")
                       (ABackendSpecific . AST3DIntersects . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_d_within")
                       (Just "is the column within a given distance from the given geometry value")
                       (ABackendSpecific . ASTDWithinGeom <$> geomInputParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_st_3d_d_within")
                       (Just "is the column within a given 3D distance from the given geometry value")
                       (ABackendSpecific . AST3DDWithinGeom <$> geomInputParser)
                   ],
              -- Ops for Ltree type
              guard (isScalarColumnWhere (== PGLtree) columnType)
                *> [ mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_ancestor")
                       (Just "is the left argument an ancestor of right (or equal)?")
                       (ABackendSpecific . AAncestor . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_ancestor_any")
                       (Just "does array contain an ancestor of `ltree`?")
                       (ABackendSpecific . AAncestorAny . mkListLiteral columnType <$> columnListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_descendant")
                       (Just "is the left argument a descendant of right (or equal)?")
                       (ABackendSpecific . ADescendant . mkParameter <$> typedParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_descendant_any")
                       (Just "does array contain a descendant of `ltree`?")
                       (ABackendSpecific . ADescendantAny . mkListLiteral columnType <$> columnListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_matches")
                       (Just "does `ltree` match `lquery`?")
                       (ABackendSpecific . AMatches . mkParameter <$> lqueryParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_matches_any")
                       (Just "does `ltree` match any `lquery` in array?")
                       (ABackendSpecific . AMatchesAny . mkListLiteral (ColumnScalar PGLquery) <$> textListParser),
                     mkBoolOperator
                       collapseIfNull
                       $$(G.litName "_matches_fulltext")
                       (Just "does `ltree` match `ltxtquery`?")
                       (ABackendSpecific . AMatchesFulltext . mkParameter <$> ltxtqueryParser)
                   ]
            ]
  where
    mkListLiteral :: ColumnType ('Postgres pgKind) -> [ColumnValue ('Postgres pgKind)] -> UnpreparedValue ('Postgres pgKind)
    mkListLiteral columnType columnValues =
      P.UVLiteral $
        SETyAnn
          (SEArray $ txtEncoder . cvValue <$> columnValues)
          (mkTypeAnn $ CollectableTypeArray $ unsafePGColumnToBackend columnType)

    castExp :: ColumnType ('Postgres pgKind) -> m (Maybe (Parser 'Input n (CastExp ('Postgres pgKind) (UnpreparedValue ('Postgres pgKind)))))
    castExp sourceType = do
      let maybeScalars = case sourceType of
            ColumnScalar PGGeography -> Just (PGGeography, PGGeometry)
            ColumnScalar PGGeometry -> Just (PGGeometry, PGGeography)
            _ -> Nothing

      forM maybeScalars $ \(sourceScalar, targetScalar) -> do
        sourceName <- mkScalarTypeName sourceScalar <&> (<> $$(G.litName "_cast_exp"))
        targetName <- mkScalarTypeName targetScalar
        targetOpExps <- comparisonExps $ ColumnScalar targetScalar
        let field = P.fieldOptional targetName Nothing $ (targetScalar,) <$> targetOpExps
        pure $ P.object sourceName Nothing $ M.fromList . maybeToList <$> field
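
-- For instance (illustrative; type names are modulo customization): a
-- @geometry@ column's comparison object gets a @_cast@ field of type
-- @geometry_cast_exp@, whose single optional @geography@ field accepts the
-- comparison operators of the @geography@ type, and vice versa for
-- @geography@ columns.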

geographyWithinDistanceInput ::
  forall pgKind m n r.
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
  m (Parser 'Input n (DWithinGeogOp (UnpreparedValue ('Postgres pgKind))))
geographyWithinDistanceInput = do
  geographyParser <- columnParser (ColumnScalar PGGeography) (G.Nullability False)
  -- FIXME
  -- It doesn't make sense for this value to be nullable; it only is for
  -- backwards compatibility; if an explicit Null value is given, it will be
  -- forwarded to the underlying SQL function, which in turn treats a null value
  -- as an error. We can fix this by rejecting explicit null values, by marking
  -- this field non-nullable in a future release.
  booleanParser <- columnParser (ColumnScalar PGBoolean) (G.Nullability True)
  floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
  pure $
    P.object $$(G.litName "st_d_within_geography_input") Nothing $
      DWithinGeogOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
        <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geographyParser)
        <*> (mkParameter <$> P.fieldWithDefault $$(G.litName "use_spheroid") Nothing (G.VBoolean True) booleanParser)

geometryWithinDistanceInput ::
  forall pgKind m n r.
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
  m (Parser 'Input n (DWithinGeomOp (UnpreparedValue ('Postgres pgKind))))
geometryWithinDistanceInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
  pure $
    P.object $$(G.litName "st_d_within_input") Nothing $
      DWithinGeomOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
        <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geometryParser)

intersectsNbandGeomInput ::
  forall pgKind m n r.
  (MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
  m (Parser 'Input n (STIntersectsNbandGeommin (UnpreparedValue ('Postgres pgKind))))
intersectsNbandGeomInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
  pure $
    P.object $$(G.litName "st_intersects_nband_geom_input") Nothing $
      STIntersectsNbandGeommin <$> (mkParameter <$> P.field $$(G.litName "nband") Nothing integerParser)
        <*> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
|
|
|
|
|
|
|
|
|
|
intersectsGeomNbandInput ::
|
2021-10-29 17:42:07 +03:00
|
|
|
|
forall pgKind m n r.
|
|
|
|
|
(MonadSchema n m, MonadError QErr m, MonadReader r m, Has MkTypename r) =>
|
2021-09-24 01:56:37 +03:00
|
|
|
|
m (Parser 'Input n (STIntersectsGeomminNband (UnpreparedValue ('Postgres pgKind))))
|
2021-02-23 20:37:27 +03:00
|
|
|
|
intersectsGeomNbandInput = do
|
|
|
|
|
geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
|
2021-09-24 01:56:37 +03:00
|
|
|
|
integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
|
|
|
|
|
pure $
|
2021-11-30 12:51:46 +03:00
|
|
|
|
P.object $$(G.litName "st_intersects_geom_nband_input") Nothing $
|
2021-09-24 01:56:37 +03:00
|
|
|
|
STIntersectsGeomminNband
|
|
|
|
|
<$> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
|
|
|
|
|
<*> (fmap mkParameter <$> P.fieldOptional $$(G.litName "nband") Nothing integerParser)
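-- For illustration only (not something this module emits verbatim): assuming
-- the usual GraphQL scalar names for 'PGInteger' and 'PGGeometry', the two
-- parsers above describe input objects shaped roughly like
--
-- > input st_intersects_nband_geom_input {
-- >   nband: Int!
-- >   geommin: geometry!
-- > }
-- >
-- > input st_intersects_geom_nband_input {
-- >   geommin: geometry!
-- >   nband: Int
-- > }
--
-- where the second @nband@ is nullable because it is built with 'P.fieldOptional'.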
|
2021-02-23 20:37:27 +03:00
|
|
|
|
|
2021-04-22 00:44:37 +03:00
|
|
|
|
mkCountType :: Maybe Bool -> Maybe [Column ('Postgres pgKind)] -> CountType ('Postgres pgKind)
|
2021-09-24 01:56:37 +03:00
|
|
|
|
mkCountType _ Nothing = PG.CTStar
|
2021-02-23 20:37:27 +03:00
|
|
|
|
mkCountType (Just True) (Just cols) = PG.CTDistinct cols
|
2021-09-24 01:56:37 +03:00
|
|
|
|
mkCountType _ (Just cols) = PG.CTSimple cols
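-- For example (illustrative only; @c1@ and @c2@ stand for arbitrary columns,
-- and the constructors map onto the usual SQL count forms):
--
-- >  mkCountType Nothing      Nothing          -- PG.CTStar,           i.e. COUNT(*)
-- >  mkCountType (Just True)  (Just [c1, c2])  -- PG.CTDistinct [...], i.e. COUNT(DISTINCT ...)
-- >  mkCountType (Just False) (Just [c1])      -- PG.CTSimple [...],   i.e. COUNT(...)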
|
2021-02-23 20:37:27 +03:00
|
|
|
|
|
2021-11-18 21:02:58 +03:00
|
|
|
|
-- | Update operator that prepends a value to a column containing jsonb arrays.
|
|
|
|
|
--
|
|
|
|
|
-- Note: Currently this is Postgres specific because json columns have not been ported
|
|
|
|
|
-- to other backends yet.
|
|
|
|
|
prependOp ::
|
|
|
|
|
forall pgKind m n r.
|
|
|
|
|
( BackendSchema ('Postgres pgKind),
|
|
|
|
|
MonadReader r m,
|
|
|
|
|
MonadError QErr m,
|
|
|
|
|
MonadSchema n m,
|
|
|
|
|
Has MkTypename r
|
|
|
|
|
) =>
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.UpdateOperator ('Postgres pgKind) m n (UnpreparedValue ('Postgres pgKind))
|
|
|
|
|
prependOp = SU.UpdateOperator {..}
|
2021-11-18 21:02:58 +03:00
|
|
|
|
where
|
|
|
|
|
updateOperatorApplicableColumn = isScalarColumnWhere (== PGJSONB) . pgiType
|
|
|
|
|
|
|
|
|
|
updateOperatorParser tableGQLName tableName columns = do
|
|
|
|
|
let typedParser columnInfo =
|
|
|
|
|
fmap P.mkParameter
|
|
|
|
|
<$> BS.columnParser
|
|
|
|
|
(pgiType columnInfo)
|
|
|
|
|
(G.Nullability $ pgiIsNullable columnInfo)
|
|
|
|
|
|
|
|
|
|
desc = "prepend existing jsonb value of filtered columns with new jsonb value"
|
|
|
|
|
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.updateOperator
|
2021-11-18 21:02:58 +03:00
|
|
|
|
tableGQLName
|
|
|
|
|
$$(G.litName "_prepend")
|
|
|
|
|
typedParser
|
|
|
|
|
columns
|
|
|
|
|
desc
|
|
|
|
|
desc
|
|
|
|
|
|
|
|
|
|
-- | Update operator that appends a value to a column containing jsonb arrays.
|
|
|
|
|
--
|
|
|
|
|
-- Note: Currently this is Postgres specific because json columns have not been ported
|
|
|
|
|
-- to other backends yet.
|
|
|
|
|
appendOp ::
|
|
|
|
|
forall pgKind m n r.
|
|
|
|
|
( BackendSchema ('Postgres pgKind),
|
|
|
|
|
MonadReader r m,
|
|
|
|
|
MonadError QErr m,
|
|
|
|
|
MonadSchema n m,
|
|
|
|
|
Has MkTypename r
|
|
|
|
|
) =>
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.UpdateOperator ('Postgres pgKind) m n (UnpreparedValue ('Postgres pgKind))
|
|
|
|
|
appendOp = SU.UpdateOperator {..}
|
2021-11-18 21:02:58 +03:00
|
|
|
|
where
|
|
|
|
|
updateOperatorApplicableColumn = isScalarColumnWhere (== PGJSONB) . pgiType
|
|
|
|
|
|
|
|
|
|
updateOperatorParser tableGQLName tableName columns = do
|
|
|
|
|
let typedParser columnInfo =
|
|
|
|
|
fmap P.mkParameter
|
|
|
|
|
<$> BS.columnParser
|
|
|
|
|
(pgiType columnInfo)
|
|
|
|
|
(G.Nullability $ pgiIsNullable columnInfo)
|
|
|
|
|
|
|
|
|
|
desc = "append existing jsonb value of filtered columns with new jsonb value"
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.updateOperator
|
2021-11-18 21:02:58 +03:00
|
|
|
|
tableGQLName
|
|
|
|
|
$$(G.litName "_append")
|
|
|
|
|
typedParser
|
|
|
|
|
columns
|
|
|
|
|
desc
|
|
|
|
|
desc
|
|
|
|
|
|
|
|
|
|
-- | Update operator that deletes a value at a specified key from a column
|
|
|
|
|
-- containing jsonb objects.
|
|
|
|
|
--
|
|
|
|
|
-- Note: Currently this is Postgres specific because json columns have not been ported
|
|
|
|
|
-- to other backends yet.
|
|
|
|
|
deleteKeyOp ::
|
|
|
|
|
forall pgKind m n r.
|
|
|
|
|
( BackendSchema ('Postgres pgKind),
|
|
|
|
|
MonadReader r m,
|
|
|
|
|
MonadError QErr m,
|
|
|
|
|
MonadSchema n m,
|
|
|
|
|
Has MkTypename r
|
|
|
|
|
) =>
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.UpdateOperator ('Postgres pgKind) m n (UnpreparedValue ('Postgres pgKind))
|
|
|
|
|
deleteKeyOp = SU.UpdateOperator {..}
|
2021-11-18 21:02:58 +03:00
|
|
|
|
where
|
|
|
|
|
updateOperatorApplicableColumn = isScalarColumnWhere (== PGJSONB) . pgiType
|
|
|
|
|
|
|
|
|
|
updateOperatorParser tableGQLName tableName columns = do
|
|
|
|
|
let nullableTextParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGText) (G.Nullability True)
|
|
|
|
|
desc = "delete key/value pair or string element. key/value pairs are matched based on their key value"
|
|
|
|
|
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.updateOperator
|
2021-11-18 21:02:58 +03:00
|
|
|
|
tableGQLName
|
|
|
|
|
$$(G.litName "_delete_key")
|
|
|
|
|
nullableTextParser
|
|
|
|
|
columns
|
|
|
|
|
desc
|
|
|
|
|
desc
|
|
|
|
|
|
|
|
|
|
-- | Update operator that deletes a value at a specific index from a column
|
|
|
|
|
-- containing jsonb arrays.
|
|
|
|
|
--
|
|
|
|
|
-- Note: Currently this is Postgres specific because json columns have not been ported
|
|
|
|
|
-- to other backends yet.
|
|
|
|
|
deleteElemOp ::
|
|
|
|
|
forall pgKind m n r.
|
|
|
|
|
( BackendSchema ('Postgres pgKind),
|
|
|
|
|
MonadReader r m,
|
|
|
|
|
MonadError QErr m,
|
|
|
|
|
MonadSchema n m,
|
|
|
|
|
Has MkTypename r
|
|
|
|
|
) =>
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.UpdateOperator ('Postgres pgKind) m n (UnpreparedValue ('Postgres pgKind))
|
|
|
|
|
deleteElemOp = SU.UpdateOperator {..}
|
2021-11-18 21:02:58 +03:00
|
|
|
|
where
|
|
|
|
|
updateOperatorApplicableColumn = isScalarColumnWhere (== PGJSONB) . pgiType
|
|
|
|
|
|
|
|
|
|
updateOperatorParser tableGQLName tableName columns = do
|
|
|
|
|
let nonNullableIntParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGInteger) (G.Nullability False)
|
|
|
|
|
desc =
|
|
|
|
|
"delete the array element with specified index (negative integers count from the end). "
|
|
|
|
|
<> "throws an error if top level container is not an array"
|
|
|
|
|
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.updateOperator
|
2021-11-18 21:02:58 +03:00
|
|
|
|
tableGQLName
|
|
|
|
|
$$(G.litName "_delete_elem")
|
|
|
|
|
nonNullableIntParser
|
|
|
|
|
columns
|
|
|
|
|
desc
|
|
|
|
|
desc
|
|
|
|
|
|
|
|
|
|
-- | Update operator that deletes a field at a certain path from a column
|
|
|
|
|
-- containing jsonb objects.
|
|
|
|
|
--
|
|
|
|
|
-- Note: Currently this is Postgres specific because json columns have not been ported
|
|
|
|
|
-- to other backends yet.
|
|
|
|
|
deleteAtPathOp ::
|
|
|
|
|
forall pgKind m n r.
|
|
|
|
|
( BackendSchema ('Postgres pgKind),
|
|
|
|
|
MonadReader r m,
|
|
|
|
|
MonadError QErr m,
|
|
|
|
|
MonadSchema n m,
|
|
|
|
|
Has MkTypename r
|
|
|
|
|
) =>
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.UpdateOperator ('Postgres pgKind) m n [UnpreparedValue ('Postgres pgKind)]
|
|
|
|
|
deleteAtPathOp = SU.UpdateOperator {..}
|
2021-11-18 21:02:58 +03:00
|
|
|
|
where
|
|
|
|
|
updateOperatorApplicableColumn = isScalarColumnWhere (== PGJSONB) . pgiType
|
|
|
|
|
|
|
|
|
|
updateOperatorParser tableGQLName tableName columns = do
|
|
|
|
|
let nonNullableTextListParser _ = P.list . fmap P.mkParameter <$> columnParser (ColumnScalar PGText) (G.Nullability False)
|
|
|
|
|
desc = "delete the field or element with specified path (for JSON arrays, negative integers count from the end)"
|
|
|
|
|
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.updateOperator
|
2021-11-18 21:02:58 +03:00
|
|
|
|
tableGQLName
|
|
|
|
|
$$(G.litName "_delete_at_path")
|
|
|
|
|
nonNullableTextListParser
|
|
|
|
|
columns
|
|
|
|
|
desc
|
|
|
|
|
desc
|
|
|
|
|
|
|
|
|
|
-- | Various update operators
|
|
|
|
|
updateOperators ::
|
|
|
|
|
forall pgKind m n r.
|
|
|
|
|
( MonadParse n,
|
|
|
|
|
MonadReader r m,
|
|
|
|
|
Has MkTypename r,
|
|
|
|
|
MonadError QErr m,
|
|
|
|
|
MonadSchema n m,
|
|
|
|
|
BackendSchema ('Postgres pgKind)
|
|
|
|
|
) =>
|
|
|
|
|
TableInfo ('Postgres pgKind) ->
|
|
|
|
|
UpdPermInfo ('Postgres pgKind) ->
|
2021-11-25 00:39:42 +03:00
|
|
|
|
m (InputFieldsParser n (HashMap (Column ('Postgres pgKind)) (UpdateOpExpression (UnpreparedValue ('Postgres pgKind)))))
|
2021-11-18 21:02:58 +03:00
|
|
|
|
updateOperators tableInfo updatePermissions =
|
2021-11-26 16:47:12 +03:00
|
|
|
|
SU.buildUpdateOperators
|
|
|
|
|
(PGIR.UpdateSet <$> SU.presetColumns updatePermissions)
|
|
|
|
|
[ PGIR.UpdateSet <$> SU.setOp,
|
|
|
|
|
PGIR.UpdateInc <$> SU.incOp,
|
2021-11-25 00:39:42 +03:00
|
|
|
|
PGIR.UpdatePrepend <$> prependOp,
|
|
|
|
|
PGIR.UpdateAppend <$> appendOp,
|
|
|
|
|
PGIR.UpdateDeleteKey <$> deleteKeyOp,
|
|
|
|
|
PGIR.UpdateDeleteElem <$> deleteElemOp,
|
|
|
|
|
PGIR.UpdateDeleteAtPath <$> deleteAtPathOp
|
2021-11-18 21:02:58 +03:00
|
|
|
|
]
|
|
|
|
|
tableInfo
|
|
|
|
|
updatePermissions
|
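-- For illustration only (hypothetical table @article@ with an integer column
-- @views@ and a jsonb column @extras@): the operators assembled above surface
-- as top-level arguments of the generated update mutation, roughly
--
-- > mutation {
-- >   update_article(
-- >     where: {id: {_eq: 1}},
-- >     _set: {title: "new title"},
-- >     _inc: {views: 1},
-- >     _append: {extras: {reviewed: true}},
-- >     _delete_key: {extras: "draft"}
-- >   ) {
-- >     affected_rows
-- >   }
-- > }
--
-- Which of these arguments is actually available depends on the column types
-- and the role's update permissions, as mediated by 'SU.buildUpdateOperators'.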