Support representing single-column Postgres tables as GraphQL enums (close #982) (#2672)

Alexis King 2019-08-26 01:05:56 -05:00 committed by GitHub
commit 6b9b2b67cb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
119 changed files with 3549 additions and 2036 deletions

View File

@ -159,7 +159,7 @@ jobs:
# build the server binary, and package into docker image
build_server:
docker:
- image: hasura/graphql-engine-server-builder:20190507-1
- image: hasura/graphql-engine-server-builder:20190811
working_directory: ~/graphql-engine
steps:
- attach_workspace:
@ -235,7 +235,7 @@ jobs:
environment:
PG_VERSION: "11_1"
docker:
- image: hasura/graphql-engine-server-builder:20190507-1
- image: hasura/graphql-engine-server-builder:20190811
# TODO: change this to circleci postgis when they have one for pg 11
- image: mdillon/postgis:11-alpine
<<: *test_pg_env
@ -245,7 +245,7 @@ jobs:
environment:
PG_VERSION: "10_6"
docker:
- image: hasura/graphql-engine-server-builder:20190507-1
- image: hasura/graphql-engine-server-builder:20190811
- image: circleci/postgres:10.6-alpine-postgis
<<: *test_pg_env
@ -254,7 +254,7 @@ jobs:
environment:
PG_VERSION: "9_6"
docker:
- image: hasura/graphql-engine-server-builder:20190507-1
- image: hasura/graphql-engine-server-builder:20190811
- image: circleci/postgres:9.6-alpine-postgis
<<: *test_pg_env
@ -263,7 +263,7 @@ jobs:
environment:
PG_VERSION: "9_5"
docker:
- image: hasura/graphql-engine-server-builder:20190507-1
- image: hasura/graphql-engine-server-builder:20190811
- image: circleci/postgres:9.5-alpine-postgis
<<: *test_pg_env

View File

@ -28,3 +28,5 @@ RUN apt-get -y update \
&& rm -rf /usr/share/doc/ \
&& rm -rf /usr/share/man/ \
&& rm -rf /usr/share/locale/
ENV LANG=C.UTF-8 LC_ALL=C.UTF-8

View File

@ -34,7 +34,6 @@ var testMetadataPrev = map[string][]byte{
"metadata": []byte(`allowlist: []
functions: []
query_collections: []
query_templates: []
remote_schemas: []
tables:
- array_relationships: []
@ -49,7 +48,6 @@ tables:
"empty-metadata": []byte(`allowlist: []
functions: []
query_collections: []
query_templates: []
remote_schemas: []
tables: []
`),
@ -65,6 +63,7 @@ tables:
delete_permissions: []
event_triggers: []
insert_permissions: []
is_enum: false
object_relationships: []
select_permissions: []
table: test
@ -264,7 +263,7 @@ func mustWriteFile(t testing.TB, dir, file string, body string) {
func compareMetadata(t testing.TB, metadataFile string, actualType string, serverVersion *semver.Version) {
var actualData []byte
c, err := semver.NewConstraint("<= v1.0.0-beta.3")
c, err := semver.NewConstraint("<= v1.0.0-beta.5")
if err != nil {
t.Fatal(err)
}

View File

@ -186,6 +186,10 @@ ul {
position: relative;
}
#docs-content span.target {
font-style: italic;
}
/*** random overrides ***/
.wy-plain-list-decimal ol,

View File

@ -49,6 +49,57 @@ Args syntax
- true
- :ref:`TableName <TableName>`
- Name of the table
* - is_enum
- false
- Boolean
- When set to ``true``, creates the table as an :ref:`enum table <enum table>`.
.. _set_table_is_enum:
set_table_is_enum
-----------------
``set_table_is_enum`` sets whether an already-tracked table should be used as an :ref:`enum table <enum table>`.
Use table ``user_role`` as an enum table:
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type": "set_table_is_enum",
"args": {
"table": {
"schema": "public",
"name": "user_role"
},
"is_enum": true
}
}
.. _set_table_is_enum_syntax:
Args syntax
^^^^^^^^^^^
.. list-table::
:header-rows: 1
* - Key
- Required
- Schema
- Description
* - table
- true
- :ref:`TableName <TableName>`
- Name of the table
* - is_enum
- true
- Boolean
- Whether or not the table should be used as an :ref:`enum table <enum table>`.
.. _untrack_table:
@ -76,7 +127,7 @@ Remove a table/view ``author``:
}
}
.. _untrack_table_syntax:
Args syntax

View File

@ -1,103 +1,176 @@
Enum type fields
================
.. contents:: Table of contents
:backlinks: none
:depth: 1
:local:
Enum type fields are restricted to a fixed set of allowed values. In a relational database such as
Postgres, an enum type field in a table can be defined in two ways:
Enum type fields can only take a value from a fixed set of allowed values.
1. Using `native Postgres enum types <https://www.postgresql.org/docs/current/datatype-enum.html>`__.
In a relational database such as Postgres, an enum type field in a table can be defined by:
While seemingly the most obvious solution, native enum types have a significant drawback: they are not easily mutable.
New values cannot be added to an enum inside a transaction (that is, ``ALTER TYPE ... ADD VALUE`` is not
supported by transactional DDL), and values cannot be removed from an enum at all without completely dropping
and recreating it (which cannot be done if the enum is in use by *any* tables, views, or functions). Therefore,
native enum types should only be used for enums that are guaranteed to *never* change, such as days of the
week.
- using native database enum types
- setting a foreign-key to a reference table which contains the list of allowed values.
2. Using `foreign-key references <https://www.postgresql.org/docs/current/tutorial-fk.html>`__ to a single-column
table.
`Postgres Enum types <https://www.postgresql.org/docs/current/datatype-enum.html>`__ are not easily mutable. Hence
they should be used only for enums which are not going to change over time. e.g. measurement units, days of the
week, etc.
This approach represents an enum using ordinary relational database concepts. The enum type is represented by a
table, and the values of the enum are rows in the table. Columns in other tables that use the enum are ordinary
foreign-key references to the enum table.
For enums whose values are dynamic and will require updates, the reference table approach is recommended. e.g. list
of tags, list of teams, etc.
For enums with values that are dynamic and may require updates, such as a list of tags or user roles, this
approach is strongly recommended. Modifying an enum defined this way is easy: simply insert, update, or delete
rows in the enum table (and updates or deletes can even be cascaded to references, and they may be done within
a transaction).
Given the limitations of native Postgres enum types, Hasura currently only generates GraphQL enum types for enums
defined using the second approach (i.e. referenced tables). You may use native Postgres enum types in your database
schema, but they will essentially be treated like text fields in the generated GraphQL schema. Therefore, this guide
focuses primarily on modeling an enum using a reference table, but you may still use native Postgres enum types to
help maintain data consistency in your database.
Example: Modeling an enum using an enum table
---------------------------------------------
Let's say we have a database that tracks user information, and users may only have one of three specific roles: user,
moderator, or administrator. To represent that, we might have a ``users`` table with the following schema:
.. code-block:: sql
CREATE TABLE users (
id serial PRIMARY KEY,
name text NOT NULL,
role text NOT NULL
);
Now we can insert some users into our database:
.. code-block:: sql
INSERT INTO users (name, role) VALUES
('Alyssa', 'administrator'),
('Ben', 'moderator'),
('Gerald', 'user');
This works alright, but it doesn't prevent us from inserting nonsensical values for ``role``, such as
.. code-block:: sql
INSERT INTO users (name, role) VALUES
('Hal', 'spaghetti');
which we certainly don't want. Let's create an enum to restrict the allowed values.
Create an enum table
^^^^^^^^^^^^^^^^^^^^
To represent our enum, we're going to create an _`enum table`, which for Hasura's purposes is any table that meets
the following restrictions:
1. The table must have a single-column primary key of type ``text``. The values of this column are the legal values
of the enum, and they must all be `valid GraphQL enum value names
<https://graphql.github.io/graphql-spec/June2018/#EnumValue>`__.
2. Optionally, the table may have a second column, also of type ``text``, which will be used as a description of each
value in the generated GraphQL schema.
3. The table may not contain any other columns.
For example, to create an enum that represents our user roles, we would create the following table:
.. code-block:: sql
CREATE TABLE user_role (
value text PRIMARY KEY,
comment text
);
INSERT INTO user_role (value, comment) VALUES
('user', 'Ordinary users'),
('moderator', 'Users with the privilege to ban users'),
('administrator', 'Users with the privilege to set users’ roles');
Use the enum table
^^^^^^^^^^^^^^^^^^
Now that we've created an enum table, we need to update our ``users`` table to reference it:
.. code-block:: sql
ALTER TABLE users ADD CONSTRAINT
users_role_fkey FOREIGN KEY (role) REFERENCES user_role;
Next, we need to tell Hasura that this table represents an enum. We can do that by passing ``true`` for the
``is_enum`` option of the :ref:`track_table` API, or we can use the :ref:`set_table_is_enum` API to change whether or
not an already-tracked table should be used as an enum:
.. code-block:: http
POST /v1/query HTTP/1.1
Content-Type: application/json
X-Hasura-Role: admin
{
"type": "track_table",
"args": {
"table": {
"schema": "public",
"name": "user_role"
},
"is_enum": true
}
}
Make queries using enum values
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Once the table has been tracked as an enum, the GraphQL schema will be updated to reflect that the ``role`` column of
the ``users`` table only permits the values in the ``user_role`` table:
.. code-block:: graphql
type users {
id: Int!
name: String!
role: user_role_enum!
}
enum user_role_enum {
"Users with the privilege to set users roles"
administrator
"Users with the privilege to ban users"
moderator
"Ordinary users"
user
}
When making queries that filter on the ``role`` column, use the name of the enum value directly rather than providing
a string:
.. graphiql::
:view_only:
:query:
{
users(where: {role: {_eq: administrator}}) {
id
name
}
}
:response:
{
"data": {
"users": [
{
"id": 1,
"name": "Alyssa"
}
]
}
}
.. admonition:: Current limitations
Hasura currently does not generate GraphQL enums. This feature is being worked upon. Hence this guide is currently
only tailored towards helping you maintain data consistency in your database
**For example**, let's say we have a table ``magazine`` with fields ``(id, title, issue_month, issue_year)``
and we would like to restrict the values of the ``issue_month`` field to just the months of the year (i.e. January,
February, and so on).
The following are the approaches we can use to achieve this:
Option 1: Using native Postgres enum type
-----------------------------------------
Create a Postgres enum type
^^^^^^^^^^^^^^^^^^^^^^^^^^^
Open the Hasura console and head to the ``Data -> SQL`` interface.
Run the following SQL statement:
.. code-block:: sql
CREATE TYPE month AS ENUM ('January', 'February', 'March', 'and so on...');
Set column type as the Postgres enum type
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Run the following SQL statement if the table doesn't yet exist:
.. code-block:: sql
:emphasize-lines: 4
CREATE TABLE magazine(
id serial PRIMARY KEY,
title text NOT NULL,
issue_month month,
issue_year integer
);
If the table already exists, run the following SQL statement:
.. code-block:: sql
ALTER TABLE magazine
ALTER COLUMN issue_month TYPE month using issue_month::month;
Now the ``issue_month`` field can only take values from the months of the year.
See `Postgres Enum types documentation <https://www.postgresql.org/docs/current/datatype-enum.html>`__ for more info.
Option 2: Using a reference table
---------------------------------
Create a reference table for the enum
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Open the Hasura console and head to the ``Data -> Create table`` interface.
Create a table ``months_of_the_year`` with just one column ``month``, which is the primary key:
.. thumbnail:: ../../../img/graphql/manual/schema/enum-create-ref-table.png
Add the allowed enum values to the reference table
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Head to the ``GraphiQL`` tab of the console and run an insert mutation to insert the allowed enum values:
.. thumbnail:: ../../../img/graphql/manual/schema/enum-insert-ref-values.png
Add a foreign-key constraint to the reference table
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Head to the ``Data -> magazine -> Modify`` tab of the console and set a foreign-key to the ``months_of_the_year`` table
using the fields: ``issue_month -> months_of_the_year :: month``:
.. thumbnail:: ../../../img/graphql/manual/schema/enum-set-foreign-key.png
Now the ``issue_month`` field can only take values from the months of the year.
Currently, Hasura does not automatically detect changes to the contents of enum tables, so the GraphQL schema will
only be updated after manually reloading metadata after inserting, updating, or deleting rows from an enum table.

View File

@ -56,6 +56,7 @@ library
, containers
, monad-control
, monad-time
, monad-validate
, fast-logger
, wai
, postgresql-binary
@ -171,16 +172,17 @@ library
, Hasura.RQL.Instances
, Hasura.RQL.Types.SchemaCache
, Hasura.RQL.Types.SchemaCacheTypes
, Hasura.RQL.Types.Common
, Hasura.RQL.Types.Catalog
, Hasura.RQL.Types.BoolExp
, Hasura.RQL.Types.Permission
, Hasura.RQL.Types.Error
, Hasura.RQL.Types.Catalog
, Hasura.RQL.Types.Column
, Hasura.RQL.Types.Common
, Hasura.RQL.Types.DML
, Hasura.RQL.Types.Error
, Hasura.RQL.Types.EventTrigger
, Hasura.RQL.Types.RemoteSchema
, Hasura.RQL.Types.Metadata
, Hasura.RQL.Types.Permission
, Hasura.RQL.Types.QueryCollection
, Hasura.RQL.Types.RemoteSchema
, Hasura.RQL.DDL.Deps
, Hasura.RQL.DDL.Permission.Internal
, Hasura.RQL.DDL.Permission.Triggers
@ -188,10 +190,14 @@ library
, Hasura.RQL.DDL.Relationship
, Hasura.RQL.DDL.Relationship.Rename
, Hasura.RQL.DDL.Relationship.Types
, Hasura.RQL.DDL.Schema.Table
, Hasura.RQL.DDL.Schema.Rename
, Hasura.RQL.DDL.Schema.Function
, Hasura.RQL.DDL.Schema
, Hasura.RQL.DDL.Schema.Cache
, Hasura.RQL.DDL.Schema.Catalog
, Hasura.RQL.DDL.Schema.Diff
, Hasura.RQL.DDL.Schema.Enum
, Hasura.RQL.DDL.Schema.Function
, Hasura.RQL.DDL.Schema.Rename
, Hasura.RQL.DDL.Schema.Table
, Hasura.RQL.DDL.Metadata
, Hasura.RQL.DDL.Utils
, Hasura.RQL.DDL.EventTrigger
@ -258,6 +264,7 @@ library
, Hasura.HTTP
, Control.Lens.Extended
, Data.Text.Extended
, Data.Aeson.Extended
, Data.Sequence.NonEmpty
@ -266,11 +273,12 @@ library
, Data.Parser.JSONPath
, Hasura.SQL.DML
, Hasura.SQL.Error
, Hasura.SQL.GeoJSON
, Hasura.SQL.Rewrite
, Hasura.SQL.Time
, Hasura.SQL.Types
, Hasura.SQL.Value
, Hasura.SQL.GeoJSON
, Hasura.SQL.Time
, Hasura.SQL.Rewrite
, Network.URI.Extended
, Ops
, Migrate
@ -278,28 +286,31 @@ library
other-modules: Hasura.Server.Auth.JWT.Internal
, Hasura.Server.Auth.JWT.Logging
default-extensions: EmptyCase
FlexibleContexts
FlexibleInstances
InstanceSigs
MultiParamTypeClasses
LambdaCase
MultiWayIf
TupleSections
default-extensions: ApplicativeDo
BangPatterns
ConstraintKinds
DeriveDataTypeable
DeriveFoldable
DeriveFunctor
DeriveGeneric
DeriveLift
DeriveTraversable
EmptyCase
FlexibleContexts
FlexibleInstances
GeneralizedNewtypeDeriving
BangPatterns
InstanceSigs
LambdaCase
MultiParamTypeClasses
MultiWayIf
NoImplicitPrelude
OverloadedStrings
QuasiQuotes
ScopedTypeVariables
TemplateHaskell
QuasiQuotes
TupleSections
TypeApplications
TypeFamilies
NoImplicitPrelude
DeriveDataTypeable
if flag(profile)
@ -308,6 +319,8 @@ library
cpp-options: -DDeveloperAPIs
ghc-options: -O2
-foptimal-applicative-do
-fdefer-typed-holes
-Wall
-Wcompat
-Wincomplete-record-updates
@ -315,27 +328,31 @@ library
-Wredundant-constraints
executable graphql-engine
default-extensions: EmptyCase
FlexibleContexts
FlexibleInstances
InstanceSigs
MultiParamTypeClasses
LambdaCase
MultiWayIf
TupleSections
default-extensions: ApplicativeDo
BangPatterns
ConstraintKinds
DeriveDataTypeable
DeriveFoldable
DeriveFunctor
DeriveGeneric
DeriveLift
DeriveTraversable
EmptyCase
FlexibleContexts
FlexibleInstances
GeneralizedNewtypeDeriving
BangPatterns
InstanceSigs
LambdaCase
MultiParamTypeClasses
MultiWayIf
NoImplicitPrelude
OverloadedStrings
QuasiQuotes
ScopedTypeVariables
TemplateHaskell
QuasiQuotes
TupleSections
TypeApplications
TypeFamilies
NoImplicitPrelude
main-is: Main.hs
default-language: Haskell2010
@ -368,6 +385,8 @@ executable graphql-engine
ghc-prof-options: -rtsopts -fprof-auto -fno-prof-count-entries
ghc-options: -O2
-foptimal-applicative-do
-fdefer-typed-holes
-Wall
-Wcompat
-Wincomplete-record-updates

View File

@ -4,22 +4,22 @@ module Migrate
)
where
import Data.Time.Clock (UTCTime)
import Language.Haskell.TH.Syntax (Q, TExp, unTypeQ)
import Data.Time.Clock (UTCTime)
import Language.Haskell.TH.Syntax (Q, TExp, unTypeQ)
import Hasura.Prelude
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DDL.Schema
import Hasura.RQL.Types
import Hasura.Server.Query
import qualified Data.Aeson as A
import qualified Data.Text as T
import qualified Data.Yaml.TH as Y
import qualified Data.Aeson as A
import qualified Data.Text as T
import qualified Data.Yaml.TH as Y
import qualified Database.PG.Query as Q
import qualified Database.PG.Query as Q
curCatalogVer :: T.Text
curCatalogVer = "19"
curCatalogVer = "20"
migrateMetadata
:: ( MonadTx m
@ -344,6 +344,12 @@ from18To19 = do
$(Q.sqlFromFile "src-rsr/migrate_from_18_to_19.sql")
return ()
from19To20 :: (MonadTx m) => m ()
from19To20 = do
Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler
$(Q.sqlFromFile "src-rsr/migrate_from_19_to_20.sql")
pure ()
migrateCatalog
:: ( MonadTx m
, CacheRWM m
@ -353,70 +359,39 @@ migrateCatalog
, HasSQLGenCtx m
)
=> UTCTime -> m String
migrateCatalog migrationTime = do
preVer <- getCatalogVersion
if | preVer == curCatalogVer ->
return $ "already at the latest version. current version: "
<> show curCatalogVer
| preVer == "0.8" -> from08ToCurrent
| preVer == "1" -> from1ToCurrent
| preVer == "2" -> from2ToCurrent
| preVer == "3" -> from3ToCurrent
| preVer == "4" -> from4ToCurrent
| preVer == "5" -> from5ToCurrent
| preVer == "6" -> from6ToCurrent
| preVer == "7" -> from7ToCurrent
| preVer == "8" -> from8ToCurrent
| preVer == "9" -> from9ToCurrent
| preVer == "10" -> from10ToCurrent
| preVer == "11" -> from11ToCurrent
| preVer == "12" -> from12ToCurrent
| preVer == "13" -> from13ToCurrent
| preVer == "14" -> from14ToCurrent
| preVer == "15" -> from15ToCurrent
| preVer == "16" -> from16ToCurrent
| preVer == "17" -> from17ToCurrent
| preVer == "18" -> from18ToCurrent
| otherwise -> throw400 NotSupported $
"unsupported version : " <> preVer
migrateCatalog migrationTime = migrateFrom =<< getCatalogVersion
where
from18ToCurrent = from18To19 >> postMigrate
from17ToCurrent = from17To18 >> from18ToCurrent
from16ToCurrent = from16To17 >> from17ToCurrent
from15ToCurrent = from15To16 >> from16ToCurrent
from14ToCurrent = from14To15 >> from15ToCurrent
from13ToCurrent = from13To14 >> from14ToCurrent
from12ToCurrent = from12To13 >> from13ToCurrent
from11ToCurrent = from11To12 >> from12ToCurrent
from10ToCurrent = from10To11 >> from11ToCurrent
from9ToCurrent = from9To10 >> from10ToCurrent
from8ToCurrent = from8To9 >> from9ToCurrent
from7ToCurrent = from7To8 >> from8ToCurrent
from6ToCurrent = from6To7 >> from7ToCurrent
from5ToCurrent = from5To6 >> from6ToCurrent
from4ToCurrent = from4To5 >> from5ToCurrent
from3ToCurrent = from3To4 >> from4ToCurrent
from2ToCurrent = from2To3 >> from3ToCurrent
from1ToCurrent = from1To2 >> from2ToCurrent
from08ToCurrent = from08To1 >> from1ToCurrent
migrateFrom previousVersion
| previousVersion == curCatalogVer =
return $ "already at the latest version. current version: " <> show curCatalogVer
| [] <- neededMigrations =
throw400 NotSupported $ "unsupported version : " <> previousVersion
| otherwise =
traverse_ snd neededMigrations >> postMigrate
where
neededMigrations = dropWhile ((/= previousVersion) . fst) migrations
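-- e.g. starting from catalog version "18", neededMigrations is
-- [("18", from18To19), ("19", from19To20)], so every remaining migration
-- is applied in order until the catalog reaches curCatalogVer ("20")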
migrations =
[ ("0.8", from08To1)
, ("1", from1To2)
, ("2", from2To3)
, ("3", from3To4)
, ("4", from4To5)
, ("5", from5To6)
, ("6", from6To7)
, ("7", from7To8)
, ("8", from8To9)
, ("9", from9To10)
, ("10", from10To11)
, ("11", from11To12)
, ("12", from12To13)
, ("13", from13To14)
, ("14", from14To15)
, ("15", from15To16)
, ("16", from16To17)
, ("17", from17To18)
, ("18", from18To19)
, ("19", from19To20)
]
postMigrate = do
-- update the catalog version

View File

@ -10,7 +10,7 @@ import Migrate (curCatalogVer)
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DDL.Schema
import Hasura.RQL.Types
import Hasura.Server.Query
import Hasura.SQL.Types

View File

@ -0,0 +1,19 @@
module Control.Lens.Extended
( module Control.Lens
, (^..)
, (^@..)
) where
import Control.Lens hiding ((^..), (^@..))
import Data.Monoid (Endo)
import GHC.Exts (IsList, Item, fromList)
infixl 8 ^..
(^..) :: (IsList l, Item l ~ a) => s -> Getting (Endo [a]) s a -> l
v ^.. l = fromList (toListOf l v)
{-# INLINE (^..) #-}
infixl 8 ^@..
(^@..) :: (IsList l, Item l ~ (i, a)) => s -> IndexedGetting i (Endo [(i, a)]) s a -> l
v ^@.. l = fromList (itoListOf l v)
{-# INLINE (^@..) #-}
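A minimal usage sketch (not part of this diff) of the generalized operators above: because the result container is picked via its IsList instance, a fold can be collected directly into a HashSet or HashMap (instances provided by unordered-containers) without an explicit fromList call.

import qualified Data.HashMap.Strict as HashMap
import qualified Data.HashSet as HashSet
import Control.Lens.Extended

-- Collect the distinct lengths of some strings straight into a HashSet.
uniqueLengths :: HashSet.HashSet Int
uniqueLengths = ["foo", "quux", "bar"] ^.. folded . to length  -- fromList [3,4]

-- Collect an indexed fold straight into a HashMap keyed by list position.
charsByIndex :: HashMap.HashMap Int Char
charsByIndex = "abc" ^@.. ifolded  -- fromList [(0,'a'),(1,'b'),(2,'c')]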

View File

@ -12,15 +12,21 @@ module Hasura.Db
, RespTx
, LazyRespTx
, defaultTxErrorHandler
, mkTxErrorHandler
) where
import qualified Data.Aeson.Extended as J
import qualified Database.PG.Query as Q
import Control.Lens
import Control.Monad.Validate
import qualified Data.Aeson.Extended as J
import qualified Database.PG.Query as Q
import qualified Database.PG.Query.Connection as Q
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Permission
import Hasura.SQL.Error
import Hasura.SQL.Types
data PGExecCtx
@ -34,9 +40,10 @@ class (MonadError QErr m) => MonadTx m where
instance (MonadTx m) => MonadTx (StateT s m) where
liftTx = lift . liftTx
instance (MonadTx m) => MonadTx (ReaderT s m) where
liftTx = lift . liftTx
instance (MonadTx m) => MonadTx (ValidateT e m) where
liftTx = lift . liftTx
data LazyTx e a
= LTErr !e
@ -76,9 +83,39 @@ setHeadersTx uVars =
pgFmtLit (J.encodeToStrictText uVars)
defaultTxErrorHandler :: Q.PGTxErr -> QErr
defaultTxErrorHandler txe =
let e = internalError "postgres query error"
in e {qeInternal = Just $ J.toJSON txe}
defaultTxErrorHandler = mkTxErrorHandler (const False)
-- | Constructs a transaction error handler given a predicate that determines which errors are
-- expected and should be reported to the user. All other errors are considered internal errors.
mkTxErrorHandler :: (PGErrorType -> Bool) -> Q.PGTxErr -> QErr
mkTxErrorHandler isExpectedError txe = fromMaybe unexpectedError expectedError
where
unexpectedError = (internalError "postgres query error") { qeInternal = Just $ J.toJSON txe }
expectedError = uncurry err400 <$> do
errorDetail <- Q.getPGStmtErr txe
message <- Q.edMessage errorDetail
errorType <- pgErrorType errorDetail
guard $ isExpectedError errorType
pure $ case errorType of
PGIntegrityConstraintViolation code ->
let cv = (ConstraintViolation,)
customMessage = (code ^? _Just._PGErrorSpecific) <&> \case
PGRestrictViolation -> cv "Can not delete or update due to data being referred. "
PGNotNullViolation -> cv "Not-NULL violation. "
PGForeignKeyViolation -> cv "Foreign key violation. "
PGUniqueViolation -> cv "Uniqueness violation. "
PGCheckViolation -> (PermissionError, "Check constraint violation. ")
PGExclusionViolation -> cv "Exclusion violation. "
in maybe (ConstraintViolation, message) (fmap (<> message)) customMessage
PGDataException code -> case code of
Just (PGErrorSpecific PGInvalidEscapeSequence) -> (BadRequest, message)
_ -> (DataException, message)
PGSyntaxErrorOrAccessRuleViolation code -> (ConstraintError,) $ case code of
Just (PGErrorSpecific PGInvalidColumnReference) ->
"there is no unique or exclusion constraint on target column(s)"
_ -> message
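As an illustrative sketch (not part of this diff), a caller that wants only integrity-constraint violations surfaced to the client as 400 responses, with every other Postgres error treated as internal, can pass a pattern-matching predicate:

-- Sketch: foreign-key, not-null, uniqueness, check, and exclusion failures
-- are reported with the friendly prefixes built above; anything else stays
-- an internal error.
constraintViolationHandler :: Q.PGTxErr -> QErr
constraintViolationHandler = mkTxErrorHandler $ \case
  PGIntegrityConstraintViolation _ -> True
  _                                -> False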
withUserInfo :: UserInfo -> LazyTx QErr a -> LazyTx QErr a
withUserInfo uInfo = \case

View File

@ -432,7 +432,7 @@ tryWebhook headers responseTimeout ep webhook = do
getEventTriggerInfoFromEvent :: SchemaCache -> Event -> Maybe EventTriggerInfo
getEventTriggerInfoFromEvent sc e = let table = eTable e
tableInfo = M.lookup table $ scTables sc
in M.lookup ( tmName $ eTrigger e) =<< (tiEventTriggerInfoMap <$> tableInfo)
in M.lookup ( tmName $ eTrigger e) =<< (_tiEventTriggerInfoMap <$> tableInfo)
fetchEvents :: Q.TxE QErr [Event]
fetchEvents =

View File

@ -12,16 +12,24 @@ import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types.Permission
-- | A /GraphQL context/, aka the final output of GraphQL schema generation. Used to both validate
-- incoming queries and respond to introspection queries.
--
-- Combines information from 'TyAgg', 'RootFields', and 'InsCtxMap' datatypes and adds a bit more on
-- top. Constructed via the 'mkGCtx' smart constructor.
data GCtx
= GCtx
{ _gTypes :: !TypeMap
, _gFields :: !FieldMap
, _gOrdByCtx :: !OrdByCtx
, _gQueryRoot :: !ObjTyInfo
, _gMutRoot :: !(Maybe ObjTyInfo)
, _gSubRoot :: !(Maybe ObjTyInfo)
, _gOpCtxMap :: !OpCtxMap
, _gInsCtxMap :: !InsCtxMap
-- GraphQL type information
{ _gTypes :: !TypeMap
, _gFields :: !FieldMap
, _gQueryRoot :: !ObjTyInfo
, _gMutRoot :: !(Maybe ObjTyInfo)
, _gSubRoot :: !(Maybe ObjTyInfo)
-- Postgres type information
, _gOrdByCtx :: !OrdByCtx
, _gQueryCtxMap :: !QueryCtxMap
, _gMutationCtxMap :: !MutationCtxMap
, _gInsCtxMap :: !InsCtxMap
} deriving (Show, Eq)
data RemoteGCtx
@ -60,8 +68,7 @@ emptyGCtx =
let queryRoot = mkQueryRootTyInfo []
allTys = mkTyInfoMap $ TIObj queryRoot:defaultTypes
-- for now subscription root is query root
in GCtx allTys mempty mempty queryRoot Nothing Nothing
mempty mempty
in GCtx allTys mempty queryRoot Nothing Nothing mempty mempty mempty mempty
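-- positional arguments above, in declaration order: _gTypes, _gFields, _gQueryRoot,
-- _gMutRoot, _gSubRoot, _gOrdByCtx, _gQueryCtxMap, _gMutationCtxMap, _gInsCtxMap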
defaultTypes :: [TypeInfo]
defaultTypes = $(fromSchemaDocQ defaultSchema TLHasuraType)

View File

@ -224,7 +224,8 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
-- Monad for resolving a hasura query/mutation
type E m =
ReaderT ( UserInfo
, OpCtxMap
, QueryCtxMap
, MutationCtxMap
, TypeMap
, FieldMap
, OrdByCtx
@ -241,10 +242,11 @@ runE
-> m a
runE ctx sqlGenCtx userInfo action = do
res <- runExceptT $ runReaderT action
(userInfo, opCtxMap, typeMap, fldMap, ordByCtx, insCtxMap, sqlGenCtx)
(userInfo, queryCtxMap, mutationCtxMap, typeMap, fldMap, ordByCtx, insCtxMap, sqlGenCtx)
either throwError return res
where
opCtxMap = _gOpCtxMap ctx
queryCtxMap = _gQueryCtxMap ctx
mutationCtxMap = _gMutationCtxMap ctx
typeMap = _gTypes ctx
fldMap = _gFields ctx
ordByCtx = _gOrdByCtx ctx
@ -268,7 +270,7 @@ resolveMutSelSet
:: ( MonadError QErr m
, MonadReader r m
, Has UserInfo r
, Has OpCtxMap r
, Has MutationCtxMap r
, Has FieldMap r
, Has OrdByCtx r
, Has SQLGenCtx r
@ -308,7 +310,7 @@ getMutOp ctx sqlGenCtx userInfo selSet =
getSubsOpM
:: ( MonadError QErr m
, MonadReader r m
, Has OpCtxMap r
, Has QueryCtxMap r
, Has FieldMap r
, Has OrdByCtx r
, Has SQLGenCtx r

View File

@ -24,6 +24,7 @@ module Hasura.GraphQL.Execute.LiveQuery
, subsOpFromPGAST
) where
import Control.Lens
import Data.Has
import qualified Control.Concurrent.STM as STM
@ -32,7 +33,6 @@ import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Database.PG.Query.Connection as Q
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.GraphQL.Execute.LiveQuery.Fallback as LQF
@ -49,6 +49,7 @@ import Hasura.Prelude
import Hasura.RQL.DML.Select (asSingleRowJsonResp)
import Hasura.RQL.Types
import Hasura.SQL.Error
import Hasura.SQL.Types
import Hasura.SQL.Value
@ -166,18 +167,18 @@ toMultiplexedQueryVar
=> GR.UnresolvedVal -> m S.SQLExp
toMultiplexedQueryVar = \case
GR.UVPG annPGVal ->
let GR.AnnPGVal varM isNullable colTy colVal = annPGVal
let GR.AnnPGVal varM isNullable _ colVal = annPGVal
in case (varM, isNullable) of
-- we don't check for nullability as
-- this is only used for reusable plans
-- the check has to be made before this
(Just var, _) -> do
modify $ Map.insert var (colTy, colVal)
return $ fromResVars (PgTypeSimple colTy)
modify $ Map.insert var colVal
return $ fromResVars (PGTypeScalar $ pstType colVal)
[ "variables"
, G.unName $ G.unVariable var
]
_ -> return $ toTxtValue colTy colVal
_ -> return $ toTxtValue colVal
GR.UVSessVar ty sessVar ->
return $ fromResVars ty [ "user", T.toLower sessVar]
GR.UVSQL sqlExp -> return sqlExp
@ -198,19 +199,19 @@ subsOpFromPGAST
, MonadIO m
)
-- | to validate arguments
=> PGExecCtx
-- ^ to validate arguments
-- | used as part of an identifier in the underlying live query systems
-- to avoid unnecessary load on Postgres where possible
-> GH.GQLReqUnparsed
-- ^ used as part of an identifier in the underlying live query systems
-- to avoid unnecessary load on Postgres where possible
-- | variable definitions as seen in the subscription, needed in
-- checking whether the subscription can be multiplexed or not
-> [G.VariableDefinition]
-- ^ variable definitions as seen in the subscription, needed in
-- checking whether the subscription can be multiplexed or not
-- | The alias and the partially processed live query field
-> (G.Alias, GR.QueryRootFldUnresolved)
-- ^ The alias and the partially processed live query field
-> m (LiveQueryOp, Maybe SubsPlan)
subsOpFromPGAST pgExecCtx reqUnparsed varDefs (fldAls, astUnresolved) = do
@ -272,38 +273,20 @@ validateAnnVarValsOnPg pgExecCtx annVarVals = do
let valSel = mkValidationSel $ Map.elems annVarVals
Q.Discard _ <- runTx' $ liftTx $
Q.rawQE valPgErrHandler (Q.fromBuilder $ toSQL valSel) [] False
return $ fmap (txtEncodedPGVal . snd) annVarVals
Q.rawQE dataExnErrHandler (Q.fromBuilder $ toSQL valSel) [] False
return $ fmap (txtEncodedPGVal . pstValue) annVarVals
where
mkExtrs = map (flip S.Extractor Nothing . uncurry toTxtValue)
mkExtrs = map (flip S.Extractor Nothing . toTxtValue)
mkValidationSel vars =
S.mkSelect { S.selExtr = mkExtrs vars }
runTx' tx = do
res <- liftIO $ runExceptT (runLazyTx' pgExecCtx tx)
liftEither res
-- | The error handler that is used to errors in the validation SQL.
-- It tries to specifically read few PG error codes which indicate
-- that the format of the value provided for a type is incorrect
valPgErrHandler :: Q.PGTxErr -> QErr
valPgErrHandler txErr =
fromMaybe (defaultTxErrorHandler txErr) $ do
stmtErr <- Q.getPGStmtErr txErr
codeMsg <- getPGCodeMsg stmtErr
(qErrCode, qErrMsg) <- extractError codeMsg
return $ err400 qErrCode qErrMsg
where
getPGCodeMsg pged =
(,) <$> Q.edStatusCode pged <*> Q.edMessage pged
extractError = \case
-- invalid text representation
("22P02", msg) -> return (DataException, msg)
-- invalid parameter value
("22023", msg) -> return (DataException, msg)
-- invalid input values
("22007", msg) -> return (DataException, msg)
_ -> Nothing
-- Explicitly look for the class of errors raised when the format of a value provided
-- for a type is incorrect.
dataExnErrHandler = mkTxErrorHandler (has _PGDataException)
-- | Use the existing plan with new variables and session variables
-- to create a live query operation

View File

@ -34,7 +34,7 @@ import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.SQL.Value
type PlanVariables = Map.HashMap G.Variable (Int, PGColType)
type PlanVariables = Map.HashMap G.Variable (Int, PGColumnType)
type PrepArgMap = IntMap.IntMap Q.PrepArg
data PGPlan
@ -63,7 +63,7 @@ instance J.ToJSON RootFieldPlan where
RFPRaw encJson -> J.toJSON $ TBS.fromBS encJson
RFPPostgres pgPlan -> J.toJSON pgPlan
type VariableTypes = Map.HashMap G.Variable PGColType
type VariableTypes = Map.HashMap G.Variable PGColumnType
data QueryPlan
= QueryPlan
@ -79,7 +79,7 @@ data ReusableQueryPlan
instance J.ToJSON ReusableQueryPlan where
toJSON (ReusableQueryPlan varTypes fldPlans) =
J.object [ "variables" J..= show varTypes
J.object [ "variables" J..= varTypes
, "field_plans" J..= fldPlans
]
@ -116,9 +116,9 @@ withPlan usrVars (PGPlan q reqVars prepMap) annVars = do
where
getVar accum (var, (prepNo, _)) = do
let varName = G.unName $ G.unVariable var
(_, colVal) <- onNothing (Map.lookup var annVars) $
colVal <- onNothing (Map.lookup var annVars) $
throw500 $ "missing variable in annVars : " <> varName
let prepVal = binEncoder colVal
let prepVal = toBinaryValue colVal
return $ IntMap.insert prepNo prepVal accum
-- turn the current plan into a transaction
@ -156,7 +156,7 @@ initPlanningSt =
getVarArgNum
:: (MonadState PlanningSt m)
=> G.Variable -> PGColType -> m Int
=> G.Variable -> PGColumnType -> m Int
getVarArgNum var colTy = do
PlanningSt curArgNum vars prepped <- get
case Map.lookup var vars of
@ -190,15 +190,15 @@ prepareWithPlan = \case
argNum <- case (varM, isNullable) of
(Just var, False) -> getVarArgNum var colTy
_ -> getNextArgNum
addPrepArg argNum $ binEncoder colVal
return $ toPrepParam argNum colTy
addPrepArg argNum $ toBinaryValue colVal
return $ toPrepParam argNum (pstType colVal)
R.UVSessVar ty sessVar -> do
let sessVarVal =
S.SEOpApp (S.SQLOp "->>")
[S.SEPrep 1, S.SELit $ T.toLower sessVar]
return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of
PgTypeSimple colTy -> withGeoVal colTy sessVarVal
PgTypeArray _ -> sessVarVal
PGTypeScalar colTy -> withGeoVal colTy sessVarVal
PGTypeArray _ -> sessVarVal
R.UVSQL sqlExp -> return sqlExp
queryRootName :: Text
@ -208,7 +208,7 @@ convertQuerySelSet
:: ( MonadError QErr m
, MonadReader r m
, Has TypeMap r
, Has OpCtxMap r
, Has QueryCtxMap r
, Has FieldMap r
, Has OrdByCtx r
, Has SQLGenCtx r

View File

@ -61,8 +61,8 @@ resolveVal userInfo = \case
RS.UVSessVar ty sessVar -> do
sessVarVal <- S.SELit <$> getSessVarVal userInfo sessVar
return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of
PgTypeSimple colTy -> withGeoVal colTy sessVarVal
PgTypeArray _ -> sessVarVal
PGTypeScalar colTy -> withGeoVal colTy sessVarVal
PGTypeArray _ -> sessVarVal
RS.UVSQL sqlExp -> return sqlExp
getSessVarVal
@ -87,7 +87,7 @@ explainField userInfo gCtx sqlGenCtx fld =
"__typename" -> return $ FieldPlan fName Nothing Nothing
_ -> do
unresolvedAST <-
runExplain (opCtxMap, userInfo, fldMap, orderByCtx, sqlGenCtx) $
runExplain (queryCtxMap, userInfo, fldMap, orderByCtx, sqlGenCtx) $
RS.queryFldToPGAST fld
resolvedAST <- RS.traverseQueryRootFldAST (resolveVal userInfo)
unresolvedAST
@ -99,7 +99,7 @@ explainField userInfo gCtx sqlGenCtx fld =
where
fName = GV._fName fld
opCtxMap = _gOpCtxMap gCtx
queryCtxMap = _gQueryCtxMap gCtx
fldMap = _gFields gCtx
orderByCtx = _gOrdByCtx gCtx

View File

@ -44,7 +44,7 @@ validateHdrs userInfo hdrs = do
queryFldToPGAST
:: ( MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r, Has UserInfo r
, Has OpCtxMap r
, Has QueryCtxMap r
)
=> V.Field
-> m RS.QueryRootFldUnresolved
@ -52,32 +52,26 @@ queryFldToPGAST fld = do
opCtx <- getOpCtx $ V._fName fld
userInfo <- asks getter
case opCtx of
OCSelect ctx -> do
QCSelect ctx -> do
validateHdrs userInfo (_socHeaders ctx)
RS.convertSelect ctx fld
OCSelectPkey ctx -> do
QCSelectPkey ctx -> do
validateHdrs userInfo (_spocHeaders ctx)
RS.convertSelectByPKey ctx fld
OCSelectAgg ctx -> do
QCSelectAgg ctx -> do
validateHdrs userInfo (_socHeaders ctx)
RS.convertAggSelect ctx fld
OCFuncQuery ctx -> do
QCFuncQuery ctx -> do
validateHdrs userInfo (_fqocHeaders ctx)
RS.convertFuncQuerySimple ctx fld
OCFuncAggQuery ctx -> do
QCFuncAggQuery ctx -> do
validateHdrs userInfo (_fqocHeaders ctx)
RS.convertFuncQueryAgg ctx fld
OCInsert _ ->
throw500 "unexpected OCInsert for query field context"
OCUpdate _ ->
throw500 "unexpected OCUpdate for query field context"
OCDelete _ ->
throw500 "unexpected OCDelete for query field context"
queryFldToSQL
:: ( MonadError QErr m, MonadReader r m, Has FieldMap r
, Has OrdByCtx r, Has SQLGenCtx r, Has UserInfo r
, Has OpCtxMap r
, Has QueryCtxMap r
)
=> PrepFn m
-> V.Field
@ -94,7 +88,7 @@ mutFldToTx
:: ( MonadError QErr m
, MonadReader r m
, Has UserInfo r
, Has OpCtxMap r
, Has MutationCtxMap r
, Has FieldMap r
, Has OrdByCtx r
, Has SQLGenCtx r
@ -106,33 +100,23 @@ mutFldToTx fld = do
userInfo <- asks getter
opCtx <- getOpCtx $ V._fName fld
case opCtx of
OCInsert ctx -> do
MCInsert ctx -> do
let roleName = userRole userInfo
validateHdrs userInfo (_iocHeaders ctx)
RI.convertInsert roleName (_iocTable ctx) fld
OCUpdate ctx -> do
MCUpdate ctx -> do
validateHdrs userInfo (_uocHeaders ctx)
RM.convertUpdate ctx fld
OCDelete ctx -> do
MCDelete ctx -> do
validateHdrs userInfo (_docHeaders ctx)
RM.convertDelete ctx fld
OCSelect _ ->
throw500 "unexpected query field context for a mutation field"
OCSelectPkey _ ->
throw500 "unexpected query field context for a mutation field"
OCSelectAgg _ ->
throw500 "unexpected query field context for a mutation field"
OCFuncQuery _ ->
throw500 "unexpected query field context for a mutation field"
OCFuncAggQuery _ ->
throw500 "unexpected query field context for a mutation field"
getOpCtx
:: ( MonadError QErr m
, MonadReader r m
, Has OpCtxMap r
, Has (OpCtxMap a) r
)
=> G.Name -> m OpCtx
=> G.Name -> m a
getOpCtx f = do
opCtxMap <- asks getter
onNothing (Map.lookup f opCtxMap) $ throw500 $

View File

@ -22,7 +22,7 @@ type OpExp = OpExpG UnresolvedVal
parseOpExps
:: (MonadError QErr m)
=> PGColType -> AnnInpVal -> m [OpExp]
=> PGColumnType -> AnnInpVal -> m [OpExp]
parseOpExps colTy annVal = do
opExpsM <- flip withObjectM annVal $ \nt objM -> forM objM $ \obj ->
forM (OMap.toList obj) $ \(k, v) ->
@ -56,8 +56,8 @@ parseOpExps colTy annVal = do
"_contained_in" -> fmap AContainedIn <$> asOpRhs v
"_has_key" -> fmap AHasKey <$> asOpRhs v
"_has_keys_any" -> fmap AHasKeysAny <$> asPGArray PGText v
"_has_keys_all" -> fmap AHasKeysAll <$> asPGArray PGText v
"_has_keys_any" -> fmap AHasKeysAny <$> asPGArray (PGColumnScalar PGText) v
"_has_keys_all" -> fmap AHasKeysAll <$> asPGArray (PGColumnScalar PGText) v
-- geometry/geography type related operators
"_st_contains" -> fmap ASTContains <$> asOpRhs v
@ -77,13 +77,18 @@ parseOpExps colTy annVal = do
<> showName k
return $ catMaybes $ fromMaybe [] opExpsM
where
asOpRhs = fmap (fmap UVPG) . asPGColValM
asOpRhs = fmap (fmap UVPG) . asPGColumnValueM
asPGArray rhsTy v = do
valsM <- parseMany asPGColVal v
valsM <- parseMany asPGColumnValue v
forM valsM $ \vals -> do
let arrayExp = S.SEArray $ map (txtEncoder . _apvValue) vals
return $ UVSQL $ S.SETyAnn arrayExp $ S.mkTypeAnn $ PgTypeArray rhsTy
let arrayExp = S.SEArray $ map (txtEncoder . pstValue . _apvValue) vals
return $ UVSQL $ S.SETyAnn arrayExp $ S.mkTypeAnn $
-- Safe here because asPGColumnValue ensured all the values are of the right type, but if the
-- list is empty, we don't actually have a scalar type to use, so we need to use
-- unsafePGColumnToRepresentation to create it. (It would be nice to refactor things to
-- somehow get rid of this.)
PGTypeArray (unsafePGColumnToRepresentation rhsTy)
resolveIsNull v = case _aivValue v of
AGScalar _ Nothing -> return Nothing
@ -95,18 +100,18 @@ parseOpExps colTy annVal = do
parseAsSTDWithinObj obj = do
distanceVal <- onNothing (OMap.lookup "distance" obj) $
throw500 "expected \"distance\" input field in st_d_within"
dist <- UVPG <$> asPGColVal distanceVal
dist <- UVPG <$> asPGColumnValue distanceVal
fromVal <- onNothing (OMap.lookup "from" obj) $
throw500 "expected \"from\" input field in st_d_within"
from <- UVPG <$> asPGColVal fromVal
from <- UVPG <$> asPGColumnValue fromVal
case colTy of
PGGeography -> do
PGColumnScalar PGGeography -> do
useSpheroidVal <-
onNothing (OMap.lookup "use_spheroid" obj) $
throw500 "expected \"use_spheroid\" input field in st_d_within"
useSpheroid <- UVPG <$> asPGColVal useSpheroidVal
useSpheroid <- UVPG <$> asPGColumnValue useSpheroidVal
return $ ASTDWithinGeog $ DWithinGeogOp dist from useSpheroid
PGGeometry ->
PGColumnScalar PGGeometry ->
return $ ASTDWithinGeom $ DWithinGeomOp dist from
_ -> throw500 "expected PGGeometry/PGGeography column for st_d_within"
@ -117,7 +122,7 @@ parseCastExpression =
withObjectM $ \_ objM -> forM objM $ \obj -> do
targetExps <- forM (OMap.toList obj) $ \(targetTypeName, castedComparisonExpressionInput) -> do
let targetType = txtToPgColTy $ G.unName targetTypeName
castedComparisonExpressions <- parseOpExps targetType castedComparisonExpressionInput
castedComparisonExpressions <- parseOpExps (PGColumnScalar targetType) castedComparisonExpressionInput
return (targetType, castedComparisonExpressions)
return $ Map.fromList targetExps

View File

@ -52,7 +52,7 @@ import qualified Hasura.SQL.DML as S
getFldInfo
:: (MonadError QErr m, MonadReader r m, Has FieldMap r)
=> G.NamedType -> G.Name
-> m (Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int))
-> m (Either PGColumnInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int))
getFldInfo nt n = do
fldMap <- asks getter
onNothing (Map.lookup (nt,n) fldMap) $
@ -61,7 +61,7 @@ getFldInfo nt n = do
getPGColInfo
:: (MonadError QErr m, MonadReader r m, Has FieldMap r)
=> G.NamedType -> G.Name -> m PGColInfo
=> G.NamedType -> G.Name -> m PGColumnInfo
getPGColInfo nt n = do
fldInfo <- getFldInfo nt n
case fldInfo of
@ -112,10 +112,8 @@ withArgM args arg f = prependArgsInPath $ nameAsPath arg $
type PrepArgs = Seq.Seq Q.PrepArg
prepare
:: (MonadState PrepArgs m) => AnnPGVal -> m S.SQLExp
prepare (AnnPGVal _ _ colTy colVal) =
prepareColVal colTy colVal
prepare :: (MonadState PrepArgs m) => AnnPGVal -> m S.SQLExp
prepare (AnnPGVal _ _ _ scalarValue) = prepareColVal scalarValue
resolveValPrep
:: (MonadState PrepArgs m)
@ -136,15 +134,14 @@ withPrepArgs m = runStateT m Seq.empty
prepareColVal
:: (MonadState PrepArgs m)
=> PGColType -> PGColValue -> m S.SQLExp
prepareColVal colTy colVal = do
=> WithScalarType PGScalarValue -> m S.SQLExp
prepareColVal (WithScalarType scalarType colVal) = do
preparedArgs <- get
put (preparedArgs Seq.|> binEncoder colVal)
return $ toPrepParam (Seq.length preparedArgs + 1) colTy
return $ toPrepParam (Seq.length preparedArgs + 1) scalarType
txtConverter :: Applicative f => AnnPGVal -> f S.SQLExp
txtConverter (AnnPGVal _ _ a b) =
pure $ toTxtValue a b
txtConverter (AnnPGVal _ _ _ scalarValue) = pure $ toTxtValue scalarValue
withSelSet :: (Monad m) => SelSet -> (Field -> m a) -> m [(Text, a)]
withSelSet selSet f =

View File

@ -1,8 +1,9 @@
module Hasura.GraphQL.Resolve.InputValue
( withNotNull
, tyMismatch
, asPGColValM
, asPGColVal
, asPGColumnTypeAndValueM
, asPGColumnValueM
, asPGColumnValue
, asEnumVal
, asEnumValM
, withObject
@ -19,12 +20,14 @@ module Hasura.GraphQL.Resolve.InputValue
import Hasura.Prelude
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.RQL.Types as RQL
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.SQL.Value
withNotNull
@ -41,43 +44,43 @@ tyMismatch expectedTy v =
getAnnInpValKind (_aivValue v) <> " for value of type " <>
G.showGT (_aivType v)
asPGColValM
asPGColumnTypeAndValueM
:: (MonadError QErr m)
=> AnnInpVal -> m (Maybe AnnPGVal)
asPGColValM annInpVal = case val of
AGScalar colTy valM ->
return $ fmap (AnnPGVal varM (G.isNullable ty) colTy) valM
_ ->
tyMismatch "pgvalue" annInpVal
where
AnnInpVal ty varM val = annInpVal
=> AnnInpVal
-> m (PGColumnType, WithScalarType (Maybe PGScalarValue))
asPGColumnTypeAndValueM v = case _aivValue v of
AGScalar colTy val -> pure (PGColumnScalar colTy, WithScalarType colTy val)
AGEnum _ (AGEReference reference maybeValue) -> do
let maybeScalarValue = PGValText . RQL.getEnumValue <$> maybeValue
pure (PGColumnEnumReference reference, WithScalarType PGText maybeScalarValue)
_ -> tyMismatch "pgvalue" v
asPGColVal
:: (MonadError QErr m)
=> AnnInpVal -> m AnnPGVal
asPGColVal v = case _aivValue v of
AGScalar colTy (Just val) ->
return $ AnnPGVal (_aivVariable v) (G.isNullable (_aivType v)) colTy val
AGScalar colTy Nothing ->
throw500 $ "unexpected null for ty "
<> T.pack (show colTy)
_ -> tyMismatch "pgvalue" v
asPGColumnTypeAndAnnValueM :: (MonadError QErr m) => AnnInpVal -> m (PGColumnType, Maybe AnnPGVal)
asPGColumnTypeAndAnnValueM v = do
(columnType, scalarValueM) <- asPGColumnTypeAndValueM v
let mkAnnPGColVal = AnnPGVal (_aivVariable v) (G.isNullable (_aivType v)) columnType
pure (columnType, mkAnnPGColVal <$> sequence scalarValueM)
asEnumVal
:: (MonadError QErr m)
=> AnnInpVal -> m (G.NamedType, G.EnumValue)
asEnumVal v = case _aivValue v of
AGEnum ty (Just val) -> return (ty, val)
AGEnum ty Nothing ->
throw500 $ "unexpected null for ty " <> showNamedTy ty
_ -> tyMismatch "enum" v
asPGColumnValueM :: (MonadError QErr m) => AnnInpVal -> m (Maybe AnnPGVal)
asPGColumnValueM = fmap snd . asPGColumnTypeAndAnnValueM
asEnumValM
:: (MonadError QErr m)
=> AnnInpVal -> m (G.NamedType, Maybe G.EnumValue)
asPGColumnValue :: (MonadError QErr m) => AnnInpVal -> m AnnPGVal
asPGColumnValue v = do
(columnType, annPGValM) <- asPGColumnTypeAndAnnValueM v
onNothing annPGValM $ throw500 ("unexpected null for type " <>> columnType)
-- | Note: only handles “synthetic” enums (see 'EnumValuesInfo'). Enum table references are handled
-- by 'asPGColumnTypeAndValueM' and its variants.
asEnumVal :: (MonadError QErr m) => AnnInpVal -> m (G.NamedType, G.EnumValue)
asEnumVal = asEnumValM >=> \case
(ty, Just val) -> pure (ty, val)
(ty, Nothing) -> throw500 $ "unexpected null for ty " <> showNamedTy ty
-- | Like 'asEnumVal', only handles “synthetic” enums.
asEnumValM :: (MonadError QErr m) => AnnInpVal -> m (G.NamedType, Maybe G.EnumValue)
asEnumValM v = case _aivValue v of
AGEnum ty valM -> return (ty, valM)
_ -> tyMismatch "enum" v
AGEnum ty (AGESynthetic valM) -> return (ty, valM)
_ -> tyMismatch "enum" v
withObject
:: (MonadError QErr m)
@ -136,7 +139,7 @@ parseMany fn v = case _aivValue v of
onlyText
:: (MonadError QErr m)
=> PGColValue -> m Text
=> PGScalarValue -> m Text
onlyText = \case
PGValText t -> return t
PGValVarchar t -> return t
@ -146,12 +149,12 @@ asPGColText
:: (MonadError QErr m)
=> AnnInpVal -> m Text
asPGColText val = do
pgColVal <- _apvValue <$> asPGColVal val
pgColVal <- pstValue . _apvValue <$> asPGColumnValue val
onlyText pgColVal
asPGColTextM
:: (MonadError QErr m)
=> AnnInpVal -> m (Maybe Text)
asPGColTextM val = do
pgColValM <- fmap _apvValue <$> asPGColValM val
pgColValM <- fmap (pstValue . _apvValue) <$> asPGColumnValueM val
mapM onlyText pgColValM

View File

@ -5,7 +5,6 @@ where
import Data.Has
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.Server.Utils
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
@ -19,7 +18,6 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified Database.PG.Query as Q
import qualified Hasura.RQL.DML.Insert as RI
import qualified Hasura.RQL.DML.Returning as RR
import qualified Hasura.RQL.GBoolExp as RB
import qualified Hasura.SQL.DML as S
@ -49,7 +47,7 @@ data AnnIns a
{ _aiInsObj :: !a
, _aiConflictClause :: !(Maybe RI.ConflictClauseP1)
, _aiView :: !QualifiedTable
, _aiTableCols :: ![PGColInfo]
, _aiTableCols :: ![PGColumnInfo]
, _aiDefVals :: !(Map.HashMap PGCol S.SQLExp)
} deriving (Show, Eq, Functor, Foldable, Traversable)
@ -71,7 +69,7 @@ data RelIns a
type ObjRelIns = RelIns SingleObjIns
type ArrRelIns = RelIns MultiObjIns
type PGColWithValue = (PGCol, PGColValue)
type PGColWithValue = (PGCol, WithScalarType PGScalarValue)
data CTEExp
= CTEExp
@ -81,7 +79,7 @@ data CTEExp
data AnnInsObj
= AnnInsObj
{ _aioColumns :: ![(PGCol, PGColType, PGColValue)]
{ _aioColumns :: ![PGColWithValue]
, _aioObjRels :: ![ObjRelIns]
, _aioArrRels :: ![ArrRelIns]
} deriving (Show, Eq)
@ -104,12 +102,17 @@ traverseInsObj
-> m AnnInsObj
traverseInsObj rim (gName, annVal) defVal@(AnnInsObj cols objRels arrRels) =
case _aivValue annVal of
AGScalar colty mColVal -> do
let col = PGCol $ G.unName gName
colVal = fromMaybe (PGNull colty) mColVal
return (AnnInsObj ((col, colty, colVal):cols) objRels arrRels)
AGScalar{} -> parseValue
AGEnum{} -> parseValue
_ -> parseObject
where
parseValue = do
(_, WithScalarType scalarType maybeScalarValue) <- asPGColumnTypeAndValueM annVal
let columnName = PGCol $ G.unName gName
scalarValue = fromMaybe (PGNull scalarType) maybeScalarValue
pure $ AnnInsObj ((columnName, WithScalarType scalarType scalarValue):cols) objRels arrRels
_ -> do
parseObject = do
objM <- asObjectM annVal
-- if relational insert input is 'null' then ignore
-- return default value
@ -124,8 +127,7 @@ traverseInsObj rim (gName, annVal) defVal@(AnnInsObj cols objRels arrRels) =
let rTable = riRTable relInfo
InsCtx rtView rtCols rtDefVals rtRelInfoMap rtUpdPerm <- getInsCtx rTable
rtDefValsRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting)
rtDefVals
rtDefValsRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting) rtDefVals
withPathK (G.unName gName) $ case riType relInfo of
ObjRel -> do
@ -185,11 +187,11 @@ parseOnConflict tn updFiltrM val = withPathK "on_conflict" $
toSQLExps
:: (MonadError QErr m, MonadState PrepArgs m)
=> [(PGCol, PGColType, PGColValue)]
=> [PGColWithValue]
-> m [(PGCol, S.SQLExp)]
toSQLExps cols =
forM cols $ \(c, ty, v) -> do
prepExp <- prepareColVal ty v
forM cols $ \(c, v) -> do
prepExp <- prepareColVal v
return (c, prepExp)
mkSQLRow :: Map.HashMap PGCol S.SQLExp -> [(PGCol, S.SQLExp)] -> [S.SQLExp]
@ -200,7 +202,7 @@ mkInsertQ
:: MonadError QErr m
=> QualifiedTable
-> Maybe RI.ConflictClauseP1
-> [(PGCol, PGColType, PGColValue)]
-> [PGColWithValue]
-> [PGCol]
-> Map.HashMap PGCol S.SQLExp
-> RoleName
@ -230,21 +232,21 @@ asSingleObject = \case
fetchFromColVals
:: MonadError QErr m
=> ColVals
-> [PGColInfo]
-> (PGColInfo -> a)
-> m [(a, PGColValue)]
-> [PGColumnInfo]
-> (PGColumnInfo -> a)
-> m [(a, WithScalarType PGScalarValue)]
fetchFromColVals colVal reqCols f =
forM reqCols $ \ci -> do
let valM = Map.lookup (pgiName ci) colVal
val <- onNothing valM $ throw500 $ "column "
<> pgiName ci <<> " not found in given colVal"
pgColVal <- RB.pgValParser (pgiType ci) val
pgColVal <- parsePGScalarValue (pgiType ci) val
return (f ci, pgColVal)
mkSelCTE
:: MonadError QErr m
=> QualifiedTable
-> [PGColInfo]
-> [PGColumnInfo]
-> Maybe ColVals
-> m CTEExp
mkSelCTE tn allCols colValM = do
@ -365,7 +367,7 @@ insertObj
-> Q.TxE QErr (Int, CTEExp)
insertObj strfyNum role tn singleObjIns addCols = do
-- validate insert
validateInsert (map _1 cols) (map _riRelInfo objRels) $ map fst addCols
validateInsert (map fst cols) (map _riRelInfo objRels) $ map fst addCols
-- insert all object relations and fetch this insert dependent column values
objInsRes <- forM objRels $ insertObjRel strfyNum role
@ -373,9 +375,7 @@ insertObj strfyNum role tn singleObjIns addCols = do
-- prepare final insert columns
let objRelAffRows = sum $ map fst objInsRes
objRelDeterminedCols = concatMap snd objInsRes
objRelInsCols = mkPGColWithTypeAndVal allCols objRelDeterminedCols
addInsCols = mkPGColWithTypeAndVal allCols addCols
finalInsCols = cols <> objRelInsCols <> addInsCols
finalInsCols = cols <> objRelDeterminedCols <> addCols
-- prepare insert query as with expression
(CTEExp cte insPArgs, ccM) <-
@ -435,10 +435,9 @@ insertMultipleObjects strfyNum role tn multiObjIns addCols mutFlds errP =
-- insert all column rows at one go
withoutRelsInsert = withErrPath $ do
indexedForM_ insCols $ \insCol ->
validateInsert (map _1 insCol) [] $ map fst addCols
validateInsert (map fst insCol) [] $ map fst addCols
let addColsWithType = mkPGColWithTypeAndVal tableColInfos addCols
withAddCols = flip map insCols $ union addColsWithType
let withAddCols = flip map insCols $ union addCols
tableCols = map pgiName tableColInfos
(sqlRows, prepArgs) <- flip runStateT Seq.Empty $ do
@ -533,10 +532,3 @@ mergeListsWith [] _ _ _ = []
mergeListsWith (x:xs) l b f = case find (b x) l of
Nothing -> mergeListsWith xs l b f
Just y -> f x y : mergeListsWith xs l b f
mkPGColWithTypeAndVal :: [PGColInfo] -> [PGColWithValue]
-> [(PGCol, PGColType, PGColValue)]
mkPGColWithTypeAndVal pgColInfos pgColWithVal =
mergeListsWith pgColInfos pgColWithVal
(\ci (c, _) -> pgiName ci == c)
(\ci (c, v) -> (c, pgiType ci, v))

View File

@ -6,19 +6,20 @@ module Hasura.GraphQL.Resolve.Introspect
import Data.Has
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Resolve.InputValue
import Hasura.GraphQL.Validate.InputValue
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.Field
import Hasura.GraphQL.Validate.InputValue
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.SQL.Value
data TypeKind
@ -163,7 +164,7 @@ enumTypeR (EnumTyInfo descM n vals _) fld =
"name" -> retJ $ namedTyToTxt n
"description" -> retJ $ fmap G.unDescription descM
"enumValues" -> fmap J.toJSON $ mapM (enumValueR subFld) $
sortOn _eviVal $ Map.elems vals
sortOn _eviVal $ Map.elems (normalizeEnumValues vals)
_ -> return J.Null
-- 4.5.2.6
@ -339,7 +340,7 @@ typeR
=> Field -> m J.Value
typeR fld = do
name <- withArg args "name" $ \arg -> do
pgColVal <- _apvValue <$> asPGColVal arg
pgColVal <- pstValue . _apvValue <$> asPGColumnValue arg
case pgColVal of
PGValText t -> return t
_ -> throw500 "expecting string for name arg of __type"

View File

@ -18,8 +18,8 @@ import qualified Hasura.RQL.DML.Delete as RD
import qualified Hasura.RQL.DML.Returning as RR
import qualified Hasura.RQL.DML.Update as RU
import qualified Hasura.SQL.DML as S
import qualified Hasura.RQL.DML.Select as RS
import qualified Hasura.SQL.DML as S
import Hasura.EncJSON
import Hasura.GraphQL.Resolve.BoolExp
@ -60,7 +60,7 @@ convertRowObj
convertRowObj val =
flip withObject val $ \_ obj ->
forM (OMap.toList obj) $ \(k, v) -> do
prepExpM <- fmap UVPG <$> asPGColValM v
prepExpM <- fmap UVPG <$> asPGColumnValueM v
let prepExp = fromMaybe (UVSQL $ S.SEUnsafe "NULL") prepExpM
return (PGCol $ G.unName k, prepExp)
@ -83,7 +83,7 @@ convObjWithOp
=> ApplySQLOp -> AnnInpVal -> m [(PGCol, UnresolvedVal)]
convObjWithOp opFn val =
flip withObject val $ \_ obj -> forM (OMap.toList obj) $ \(k, v) -> do
colVal <- _apvValue <$> asPGColVal v
colVal <- pstValue . _apvValue <$> asPGColumnValue v
let pgCol = PGCol $ G.unName k
-- TODO: why are we using txtEncoder here?
encVal = txtEncoder colVal
@ -95,8 +95,8 @@ convDeleteAtPathObj
=> AnnInpVal -> m [(PGCol, UnresolvedVal)]
convDeleteAtPathObj val =
flip withObject val $ \_ obj -> forM (OMap.toList obj) $ \(k, v) -> do
vals <- flip withArray v $ \_ annVals -> mapM asPGColVal annVals
let valExps = map (txtEncoder . _apvValue) vals
vals <- flip withArray v $ \_ annVals -> mapM asPGColumnValue annVals
let valExps = map (txtEncoder . pstValue . _apvValue) vals
pgCol = PGCol $ G.unName k
annEncVal = S.SETyAnn (S.SEArray valExps) S.textArrTypeAnn
sqlExp = S.SEOpApp S.jsonbDeleteAtPathOp

View File

@ -115,7 +115,7 @@ parseTableArgs args = do
ordByExpML <- withArgM args "order_by" parseOrderBy
let ordByExpM = NE.nonEmpty =<< ordByExpML
limitExpM <- withArgM args "limit" parseLimit
offsetExpM <- withArgM args "offset" $ asPGColVal >=> txtConverter
offsetExpM <- withArgM args "offset" $ asPGColumnValue >=> txtConverter
distOnColsML <- withArgM args "distinct_on" parseColumns
let distOnColsM = NE.nonEmpty =<< distOnColsML
mapM_ (validateDistOn ordByExpM) distOnColsM
@ -255,7 +255,7 @@ parseOrderByEnum = \case
parseLimit :: ( MonadError QErr m ) => AnnInpVal -> m Int
parseLimit v = do
pgColVal <- _apvValue <$> asPGColVal v
pgColVal <- pstValue . _apvValue <$> asPGColumnValue v
limit <- maybe noIntErr return $ pgColValueToInt pgColVal
-- validate int value
onlyPositiveInt limit
@ -273,7 +273,7 @@ pgColValToBoolExp
pgColValToBoolExp colArgMap colValMap = do
colExps <- forM colVals $ \(name, val) ->
BoolFld <$> do
opExp <- AEQ True . UVPG <$> asPGColVal val
opExp <- AEQ True . UVPG <$> asPGColumnValue val
colInfo <- onNothing (Map.lookup name colArgMap) $
throw500 $ "column name " <> showName name
<> " not found in column arguments map"
@ -341,7 +341,7 @@ convertCount args = do
maybe (return S.CTStar) (mkCType isDistinct) columnsM
where
parseDistinct v = do
val <- _apvValue <$> asPGColVal v
val <- pstValue . _apvValue <$> asPGColumnValue v
case val of
PGValBoolean b -> return b
_ ->
@ -417,7 +417,7 @@ parseFunctionArgs
parseFunctionArgs argSeq val = fmap catMaybes $
flip withObject val $ \_ obj ->
fmap toList $ forM argSeq $ \(FuncArgItem argName) ->
forM (OMap.lookup argName obj) $ fmap (maybe nullSQL UVPG) . asPGColValM
forM (OMap.lookup argName obj) $ fmap (maybe nullSQL UVPG) . asPGColumnValueM
where
nullSQL = UVSQL $ S.SEUnsafe "NULL"

View File

@ -8,6 +8,7 @@ import qualified Data.Text as T
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.RQL.Types.BoolExp
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Permission
import Hasura.SQL.Types
@ -15,7 +16,23 @@ import Hasura.SQL.Value
import qualified Hasura.SQL.DML as S
type OpCtxMap = Map.HashMap G.Name OpCtx
data QueryCtx
= QCSelect !SelOpCtx
| QCSelectPkey !SelPkOpCtx
| QCSelectAgg !SelOpCtx
| QCFuncQuery !FuncQOpCtx
| QCFuncAggQuery !FuncQOpCtx
deriving (Show, Eq)
data MutationCtx
= MCInsert !InsOpCtx
| MCUpdate !UpdOpCtx
| MCDelete !DelOpCtx
deriving (Show, Eq)
type OpCtxMap a = Map.HashMap G.Name a
type QueryCtxMap = OpCtxMap QueryCtx
type MutationCtxMap = OpCtxMap MutationCtx
data InsOpCtx
= InsOpCtx
@ -55,7 +72,7 @@ data UpdOpCtx
, _uocHeaders :: ![T.Text]
, _uocFilter :: !AnnBoolExpPartialSQL
, _uocPresetCols :: !PreSetColsPartial
, _uocAllCols :: ![PGColInfo]
, _uocAllCols :: ![PGColumnInfo]
} deriving (Show, Eq)
data DelOpCtx
@ -63,7 +80,7 @@ data DelOpCtx
{ _docTable :: !QualifiedTable
, _docHeaders :: ![T.Text]
, _docFilter :: !AnnBoolExpPartialSQL
, _docAllCols :: ![PGColInfo]
, _docAllCols :: ![PGColumnInfo]
} deriving (Show, Eq)
data OpCtx
@ -79,11 +96,11 @@ data OpCtx
type FieldMap
= Map.HashMap (G.NamedType, G.Name)
(Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int))
(Either PGColumnInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int))
-- order by context
data OrdByItem
= OBIPGCol !PGColInfo
= OBIPGCol !PGColumnInfo
| OBIRel !RelInfo !AnnBoolExpPartialSQL
| OBIAgg !RelInfo !AnnBoolExpPartialSQL
deriving (Show, Eq)
@ -111,7 +128,7 @@ data UpdPermForIns
data InsCtx
= InsCtx
{ icView :: !QualifiedTable
, icAllCols :: ![PGColInfo]
, icAllCols :: ![PGColumnInfo]
, icSet :: !PreSetColsPartial
, icRelations :: !RelationInfoMap
, icUpdPerm :: !(Maybe UpdPermForIns)
@ -119,14 +136,18 @@ data InsCtx
type InsCtxMap = Map.HashMap QualifiedTable InsCtx
type PGColArgMap = Map.HashMap G.Name PGColInfo
type PGColArgMap = Map.HashMap G.Name PGColumnInfo
data AnnPGVal
= AnnPGVal
{ _apvVariable :: !(Maybe G.Variable)
, _apvIsNullable :: !Bool
, _apvType :: !PGColType
, _apvValue :: !PGColValue
, _apvType :: !PGColumnType
-- ^ Note: '_apvValue' is a @'WithScalarType' 'PGScalarValue'@, so it includes its type as a
-- 'PGScalarType'. However, we /also/ need to keep the original 'PGColumnType' information around
-- in case we need to re-parse a new value with its type because we're reusing a cached query
-- plan.
, _apvValue :: !(WithScalarType PGScalarValue)
} deriving (Show, Eq)
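
An editorial sketch, not part of this commit: the reworked _apvValue pairs a scalar value with its scalar type. Only the pstValue accessor appears in this diff; the constructor and the pstType field below are assumptions about the shape of WithScalarType.

    annRoleValue :: WithScalarType PGScalarValue
    annRoleValue = WithScalarType { pstType = PGText, pstValue = PGValText "admin" }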
type PrepFn m = AnnPGVal -> m S.SQLExp
@ -140,7 +161,7 @@ partialSQLExpToUnresolvedVal = \case
-- A value that will be converted to an sql expression eventually
data UnresolvedVal
-- From a session variable
= UVSessVar !PgType !SessVar
= UVSessVar !(PGType PGScalarType) !SessVar
-- This is postgres
| UVPG !AnnPGVal
-- This is a full resolved sql expression

View File

@ -4,7 +4,8 @@ module Hasura.GraphQL.Schema
, buildGCtxMapPG
, getGCtx
, GCtx(..)
, OpCtx(..)
, QueryCtx(..)
, MutationCtx(..)
, InsCtx(..)
, InsCtxMap
, RelationInfoMap
@ -16,6 +17,7 @@ module Hasura.GraphQL.Schema
, checkSchemaConflicts
) where
import Control.Lens.Extended hiding (op)
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
@ -43,16 +45,16 @@ import Hasura.GraphQL.Schema.OrderBy
import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Schema.Merge
getInsPerm :: TableInfo -> RoleName -> Maybe InsPermInfo
getInsPerm :: TableInfo PGColumnInfo -> RoleName -> Maybe InsPermInfo
getInsPerm tabInfo role
| role == adminRole = _permIns $ mkAdminRolePermInfo tabInfo
| otherwise = Map.lookup role rolePermInfoMap >>= _permIns
where
rolePermInfoMap = tiRolePermInfoMap tabInfo
rolePermInfoMap = _tiRolePermInfoMap tabInfo
getTabInfo
:: MonadError QErr m
=> TableCache -> QualifiedTable -> m TableInfo
=> TableCache PGColumnInfo -> QualifiedTable -> m (TableInfo PGColumnInfo)
getTabInfo tc t =
onNothing (Map.lookup t tc) $
throw500 $ "table not found: " <>> t
@ -66,32 +68,32 @@ isValidCol = isValidName . G.Name . getPGColTxt
isValidRel :: ToTxt a => RelName -> QualifiedObject a -> Bool
isValidRel rn rt = isValidName (mkRelName rn) && isValidObjectName rt
isValidField :: FieldInfo -> Bool
isValidField :: FieldInfo PGColumnInfo -> Bool
isValidField = \case
FIColumn (PGColInfo col _ _) -> isValidCol col
FIColumn (PGColumnInfo col _ _) -> isValidCol col
FIRelationship (RelInfo rn _ _ remTab _) -> isValidRel rn remTab
upsertable :: [ConstraintName] -> Bool -> Bool -> Bool
upsertable uniqueOrPrimaryCons isUpsertAllowed view =
not (null uniqueOrPrimaryCons) && isUpsertAllowed && not view
upsertable uniqueOrPrimaryCons isUpsertAllowed isAView =
not (null uniqueOrPrimaryCons) && isUpsertAllowed && not isAView
toValidFieldInfos :: FieldInfoMap -> [FieldInfo]
toValidFieldInfos :: FieldInfoMap PGColumnInfo -> [FieldInfo PGColumnInfo]
toValidFieldInfos = filter isValidField . Map.elems
validPartitionFieldInfoMap :: FieldInfoMap -> ([PGColInfo], [RelInfo])
validPartitionFieldInfoMap :: FieldInfoMap PGColumnInfo -> ([PGColumnInfo], [RelInfo])
validPartitionFieldInfoMap = partitionFieldInfos . toValidFieldInfos
getValidCols :: FieldInfoMap -> [PGColInfo]
getValidCols :: FieldInfoMap PGColumnInfo -> [PGColumnInfo]
getValidCols = fst . validPartitionFieldInfoMap
getValidRels :: FieldInfoMap -> [RelInfo]
getValidRels :: FieldInfoMap PGColumnInfo -> [RelInfo]
getValidRels = snd . validPartitionFieldInfoMap
mkValidConstraints :: [ConstraintName] -> [ConstraintName]
mkValidConstraints =
filter (isValidName . G.Name . getConstraintTxt)
isRelNullable :: FieldInfoMap -> RelInfo -> Bool
isRelNullable :: FieldInfoMap PGColumnInfo -> RelInfo -> Bool
isRelNullable fim ri = isNullable
where
lCols = map fst $ riMapping ri
@ -112,24 +114,26 @@ isAggFld = flip elem (numAggOps <> compAggOps)
mkGCtxRole'
:: QualifiedTable
-- insert permission
-> Maybe ([PGColInfo], RelationInfoMap)
-- select permission
-> Maybe ([PGColumnInfo], RelationInfoMap)
-- ^ insert permission
-> Maybe (Bool, [SelField])
-- update cols
-> Maybe [PGColInfo]
-- delete cols
-- ^ select permission
-> Maybe [PGColumnInfo]
-- ^ update cols
-> Maybe ()
-- primary key columns
-> [PGColInfo]
-- constraints
-- ^ delete cols
-> [PGColumnInfo]
-- ^ primary key columns
-> [ConstraintName]
-- ^ constraints
-> Maybe ViewInfo
-- all functions
-> [FunctionInfo]
-- ^ all functions
-> Maybe EnumValues
-- ^ present iff this table is an enum table (see "Hasura.RQL.Schema.Enum")
-> TyAgg
mkGCtxRole' tn insPermM selPermM updColsM
delPermM pkeyCols constraints viM funcs =
delPermM pkeyCols constraints viM funcs enumValuesM =
TyAgg (mkTyInfoMap allTypes) fieldMap scalars ordByCtx
where
@ -162,6 +166,7 @@ mkGCtxRole' tn insPermM selPermM updColsM
, TIInpObj <$> mutHelper viIsUpdatable updIncInpObjM
, TIObj <$> mutRespObjM
, TIEnum <$> selColInpTyM
, TIEnum <$> tableEnumTypeM
]
mutHelper :: (ViewInfo -> Bool) -> Maybe a -> Maybe a
@ -171,7 +176,7 @@ mkGCtxRole' tn insPermM selPermM updColsM
[ insInpObjFldsM, updSetInpObjFldsM
, boolExpInpObjFldsM , selObjFldsM
]
scalars = Set.unions [selByPkScalarSet, funcArgScalarSet]
scalars = selByPkScalarSet <> funcArgScalarSet
-- helper
mkColFldMap ty cols = Map.fromList $ flip map cols $
@ -209,8 +214,7 @@ mkGCtxRole' tn insPermM selPermM updColsM
-- funcargs input type
funcArgInpObjs = mapMaybe mkFuncArgsInp funcs
-- funcArgCtx = Map.unions funcArgCtxs
funcArgScalarSet = Set.fromList $
concatMap (map faType . toList . fiInputArgs) funcs
funcArgScalarSet = funcs ^.. folded.to fiInputArgs.folded.to faType
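
An editorial note, not part of this commit: the lens fold above walks every function's input arguments and collects each argument's type; for Foldable containers it spells out to the plain concatMap form, e.g.

    xs ^.. folded . to f . folded . to g  ==  concatMap (map g . toList . f) xs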
-- helper
mkFldMap ty = Map.fromList . concatMap (mkFld ty)
@ -259,54 +263,62 @@ mkGCtxRole' tn insPermM selPermM updColsM
getCompCols = onlyComparableCols . lefts
onlyFloat = const $ mkScalarTy PGFloat
mkTypeMaker "sum" = mkScalarTy
mkTypeMaker "sum" = mkColumnType
mkTypeMaker _ = onlyFloat
mkColAggFldsObjs flds =
let numCols = getNumCols flds
compCols = getCompCols flds
mkNumObjFld n = mkTableColAggFldsObj tn n (mkTypeMaker n) numCols
mkCompObjFld n = mkTableColAggFldsObj tn n mkScalarTy compCols
mkCompObjFld n = mkTableColAggFldsObj tn n mkColumnType compCols
numFldsObjs = bool (map mkNumObjFld numAggOps) [] $ null numCols
compFldsObjs = bool (map mkCompObjFld compAggOps) [] $ null compCols
in numFldsObjs <> compFldsObjs
-- the fields used in table object
selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM
-- the scalar set for table_by_pk arguments
selByPkScalarSet = Set.fromList $ map pgiType pkeyCols
selByPkScalarSet = pkeyCols ^.. folded.to pgiType._PGColumnScalar
ordByInpCtxM = mkOrdByInpObj tn <$> selFldsM
(ordByInpObjM, ordByCtxM) = case ordByInpCtxM of
Just (a, b) -> (Just a, Just b)
Nothing -> (Nothing, Nothing)
tableEnumTypeM = enumValuesM <&> \enumValues ->
mkHsraEnumTyInfo Nothing (mkTableEnumType tn) $
EnumValuesReference (EnumReference tn enumValues)
getRootFldsRole'
:: QualifiedTable
-> [PGCol]
-> [ConstraintName]
-> FieldInfoMap
-> FieldInfoMap PGColumnInfo
-> [FunctionInfo]
-> Maybe ([T.Text], Bool) -- insert perm
-> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter
-> Maybe ([PGCol], PreSetColsPartial, AnnBoolExpPartialSQL, [T.Text]) -- update filter
-> Maybe (AnnBoolExpPartialSQL, [T.Text]) -- delete filter
-> Maybe ViewInfo
-> RootFlds
-> RootFields
getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM =
RootFlds mFlds
RootFields
{ rootQueryFields = makeFieldMap
$ funcQueries
<> funcAggQueries
<> catMaybes
[ getSelDet <$> selM
, getSelAggDet selM
, getPKeySelDet selM $ getColInfos primCols colInfos
]
, rootMutationFields = makeFieldMap $ catMaybes
[ mutHelper viIsInsertable getInsDet insM
, mutHelper viIsUpdatable getUpdDet updM
, mutHelper viIsDeletable getDelDet delM
]
}
where
makeFieldMap = mapFromL (_fiName . snd)
allCols = getCols fields
mFlds = mapFromL (either _fiName _fiName . snd) $
funcQueries <>
funcAggQueries <>
catMaybes
[ mutHelper viIsInsertable getInsDet insM
, mutHelper viIsUpdatable getUpdDet updM
, mutHelper viIsDeletable getDelDet delM
, getSelDet <$> selM, getSelAggDet selM
, getPKeySelDet selM $ getColInfos primCols colInfos
]
funcQueries = maybe [] getFuncQueryFlds selM
funcAggQueries = maybe [] getFuncAggQueryFlds selM
@ -317,65 +329,65 @@ getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM =
colInfos = fst $ validPartitionFieldInfoMap fields
getInsDet (hdrs, upsertPerm) =
let isUpsertable = upsertable constraints upsertPerm $ isJust viM
in ( OCInsert $ InsOpCtx tn $ hdrs `union` maybe [] (\(_, _, _, x) -> x) updM
, Right $ mkInsMutFld tn isUpsertable
in ( MCInsert . InsOpCtx tn $ hdrs `union` maybe [] (\(_, _, _, x) -> x) updM
, mkInsMutFld tn isUpsertable
)
getUpdDet (updCols, preSetCols, updFltr, hdrs) =
( OCUpdate $ UpdOpCtx tn hdrs updFltr preSetCols allCols
, Right $ mkUpdMutFld tn $ getColInfos updCols colInfos
( MCUpdate $ UpdOpCtx tn hdrs updFltr preSetCols allCols
, mkUpdMutFld tn $ getColInfos updCols colInfos
)
getDelDet (delFltr, hdrs) =
( OCDelete $ DelOpCtx tn hdrs delFltr allCols
, Right $ mkDelMutFld tn
( MCDelete $ DelOpCtx tn hdrs delFltr allCols
, mkDelMutFld tn
)
getSelDet (selFltr, pLimit, hdrs, _) =
selFldHelper OCSelect mkSelFld selFltr pLimit hdrs
selFldHelper QCSelect mkSelFld selFltr pLimit hdrs
getSelAggDet (Just (selFltr, pLimit, hdrs, True)) =
Just $ selFldHelper OCSelectAgg mkAggSelFld selFltr pLimit hdrs
Just $ selFldHelper QCSelectAgg mkAggSelFld selFltr pLimit hdrs
getSelAggDet _ = Nothing
selFldHelper f g pFltr pLimit hdrs =
( f $ SelOpCtx tn hdrs pFltr pLimit
, Left $ g tn
, g tn
)
getPKeySelDet Nothing _ = Nothing
getPKeySelDet _ [] = Nothing
getPKeySelDet (Just (selFltr, _, hdrs, _)) pCols = Just
( OCSelectPkey $ SelPkOpCtx tn hdrs selFltr $
( QCSelectPkey . SelPkOpCtx tn hdrs selFltr $
mapFromL (mkColName . pgiName) pCols
, Left $ mkSelFldPKey tn pCols
, mkSelFldPKey tn pCols
)
getFuncQueryFlds (selFltr, pLimit, hdrs, _) =
funcFldHelper OCFuncQuery mkFuncQueryFld selFltr pLimit hdrs
funcFldHelper QCFuncQuery mkFuncQueryFld selFltr pLimit hdrs
getFuncAggQueryFlds (selFltr, pLimit, hdrs, True) =
funcFldHelper OCFuncAggQuery mkFuncAggQueryFld selFltr pLimit hdrs
funcFldHelper QCFuncAggQuery mkFuncAggQueryFld selFltr pLimit hdrs
getFuncAggQueryFlds _ = []
funcFldHelper f g pFltr pLimit hdrs =
flip map funcs $ \fi ->
( f $ FuncQOpCtx tn hdrs pFltr pLimit (fiName fi) $ mkFuncArgItemSeq fi
, Left $ g fi
( f . FuncQOpCtx tn hdrs pFltr pLimit (fiName fi) $ mkFuncArgItemSeq fi
, g fi
)
mkFuncArgItemSeq fi = Seq.fromList $
procFuncArgs (fiInputArgs fi) $ \_ t -> FuncArgItem $ G.Name t
getSelPermission :: TableInfo -> RoleName -> Maybe SelPermInfo
getSelPermission :: TableInfo PGColumnInfo -> RoleName -> Maybe SelPermInfo
getSelPermission tabInfo role =
Map.lookup role (tiRolePermInfoMap tabInfo) >>= _permSel
Map.lookup role (_tiRolePermInfoMap tabInfo) >>= _permSel
getSelPerm
:: (MonadError QErr m)
=> TableCache
=> TableCache PGColumnInfo
-- all the fields of a table
-> FieldInfoMap
-> FieldInfoMap PGColumnInfo
-- role and its permission
-> RoleName -> SelPermInfo
-> m (Bool, [SelField])
@ -401,8 +413,8 @@ getSelPerm tableCache fields role selPermInfo = do
mkInsCtx
:: MonadError QErr m
=> RoleName
-> TableCache
-> FieldInfoMap
-> TableCache PGColumnInfo
-> FieldInfoMap PGColumnInfo
-> InsPermInfo
-> Maybe UpdPermInfo
-> m InsCtx
@ -412,7 +424,7 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do
relName = riName relInfo
remoteTableInfo <- getTabInfo tableCache remoteTable
let insPermM = getInsPerm remoteTableInfo role
viewInfoM = tiViewInfo remoteTableInfo
viewInfoM = _tiViewInfo remoteTableInfo
return $ bool Nothing (Just (relName, relInfo)) $
isInsertable insPermM viewInfoM && isValidRel relName remoteTable
@ -433,15 +445,15 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do
mkAdminInsCtx
:: MonadError QErr m
=> QualifiedTable
-> TableCache
-> FieldInfoMap
-> TableCache PGColumnInfo
-> FieldInfoMap PGColumnInfo
-> m InsCtx
mkAdminInsCtx tn tc fields = do
relTupsM <- forM rels $ \relInfo -> do
let remoteTable = riRTable relInfo
relName = riName relInfo
remoteTableInfo <- getTabInfo tc remoteTable
let viewInfoM = tiViewInfo remoteTableInfo
let viewInfoM = _tiViewInfo remoteTableInfo
return $ bool Nothing (Just (relName, relInfo)) $
isMutable viIsInsertable viewInfoM && isValidRel relName remoteTable
@ -456,17 +468,18 @@ mkAdminInsCtx tn tc fields = do
mkGCtxRole
:: (MonadError QErr m)
=> TableCache
=> TableCache PGColumnInfo
-> QualifiedTable
-> FieldInfoMap
-> FieldInfoMap PGColumnInfo
-> [PGCol]
-> [ConstraintName]
-> [FunctionInfo]
-> Maybe ViewInfo
-> Maybe EnumValues
-> RoleName
-> RolePermInfo
-> m (TyAgg, RootFlds, InsCtxMap)
mkGCtxRole tableCache tn fields pCols constraints funcs viM role permInfo = do
-> m (TyAgg, RootFields, InsCtxMap)
mkGCtxRole tableCache tn fields pCols constraints funcs viM enumValuesM role permInfo = do
selPermM <- mapM (getSelPerm tableCache fields role) $ _permSel permInfo
tabInsInfoM <- forM (_permIns permInfo) $ \ipi -> do
ctx <- mkInsCtx role tableCache fields ipi $ _permUpd permInfo
@ -477,7 +490,7 @@ mkGCtxRole tableCache tn fields pCols constraints funcs viM role permInfo = do
insCtxM = fst <$> tabInsInfoM
updColsM = filterColInfos . upiCols <$> _permUpd permInfo
tyAgg = mkGCtxRole' tn insPermM selPermM updColsM
(void $ _permDel permInfo) pColInfos constraints viM funcs
(void $ _permDel permInfo) pColInfos constraints viM funcs enumValuesM
rootFlds = getRootFldsRole tn pCols constraints fields funcs viM permInfo
insCtxMap = maybe Map.empty (Map.singleton tn) insCtxM
return (tyAgg, rootFlds, insCtxMap)
@ -492,11 +505,11 @@ getRootFldsRole
:: QualifiedTable
-> [PGCol]
-> [ConstraintName]
-> FieldInfoMap
-> FieldInfoMap PGColumnInfo
-> [FunctionInfo]
-> Maybe ViewInfo
-> RolePermInfo
-> RootFlds
-> RootFields
getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM updM delM) =
getRootFldsRole' tn pCols constraints fields funcs
(mkIns <$> insM) (mkSel <$> selM)
@ -516,21 +529,22 @@ getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM up
mkGCtxMapTable
:: (MonadError QErr m)
=> TableCache
=> TableCache PGColumnInfo
-> FunctionCache
-> TableInfo
-> m (Map.HashMap RoleName (TyAgg, RootFlds, InsCtxMap))
-> TableInfo PGColumnInfo
-> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap))
mkGCtxMapTable tableCache funcCache tabInfo = do
m <- Map.traverseWithKey
(mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo) rolePerms
(mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo enumValues)
rolePerms
adminInsCtx <- mkAdminInsCtx tn tableCache fields
let adminCtx = mkGCtxRole' tn (Just (colInfos, icRelations adminInsCtx))
(Just (True, selFlds)) (Just colInfos) (Just ())
pkeyColInfos validConstraints viewInfo tabFuncs
pkeyColInfos validConstraints viewInfo tabFuncs enumValues
adminInsCtxMap = Map.singleton tn adminInsCtx
return $ Map.insert adminRole (adminCtx, adminRootFlds, adminInsCtxMap) m
where
TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _ = tabInfo
TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _ enumValues = tabInfo
validConstraints = mkValidConstraints constraints
colInfos = getValidCols fields
validColNames = map pgiName colInfos
@ -551,7 +565,7 @@ noFilter = annBoolExpTrue
mkGCtxMap
:: (MonadError QErr m)
=> TableCache -> FunctionCache -> m GCtxMap
=> TableCache PGColumnInfo -> FunctionCache -> m GCtxMap
mkGCtxMap tableCache functionCache = do
typesMapL <- mapM (mkGCtxMapTable tableCache functionCache) $
filter tableFltr $ Map.elems tableCache
@ -559,8 +573,8 @@ mkGCtxMap tableCache functionCache = do
return $ flip Map.map typesMap $ \(ty, flds, insCtxMap) ->
mkGCtx ty flds insCtxMap
where
tableFltr ti = not (tiSystemDefined ti)
&& isValidObjectName (tiName ti)
tableFltr ti = not (_tiSystemDefined ti)
&& isValidObjectName (_tiName ti)
-- | build GraphQL schema from postgres tables and functions
buildGCtxMapPG
@ -598,11 +612,15 @@ ppGCtx gCtx =
mRootO = _gMutRoot gCtx
sRootO = _gSubRoot gCtx
-- | A /types aggregate/, which holds role-specific information about visible GraphQL types.
-- Importantly, it holds more than just the information needed by GraphQL: it also includes how the
-- GraphQL types relate to Postgres types, which is used to validate literals provided for
-- Postgres-specific scalars.
data TyAgg
= TyAgg
{ _taTypes :: !TypeMap
, _taFields :: !FieldMap
, _taScalars :: !(Set.HashSet PGColType)
, _taScalars :: !(Set.HashSet PGScalarType)
, _taOrdBy :: !OrdByCtx
} deriving (Show, Eq)
@ -615,21 +633,23 @@ instance Monoid TyAgg where
mempty = TyAgg Map.empty Map.empty Set.empty Map.empty
mappend = (<>)
newtype RootFlds
= RootFlds
{ _taMutation :: Map.HashMap G.Name (OpCtx, Either ObjFldInfo ObjFldInfo)
-- | A role-specific mapping from root field names to allowed operations.
data RootFields
= RootFields
{ rootQueryFields :: !(Map.HashMap G.Name (QueryCtx, ObjFldInfo))
, rootMutationFields :: !(Map.HashMap G.Name (MutationCtx, ObjFldInfo))
} deriving (Show, Eq)
instance Semigroup RootFlds where
(RootFlds m1) <> (RootFlds m2)
= RootFlds (Map.union m1 m2)
instance Semigroup RootFields where
RootFields a1 b1 <> RootFields a2 b2
= RootFields (Map.union a1 a2) (Map.union b1 b2)
instance Monoid RootFlds where
mempty = RootFlds Map.empty
instance Monoid RootFields where
mempty = RootFields Map.empty Map.empty
mappend = (<>)
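
An editorial note, not part of this commit: Map.union from Data.HashMap.Strict is left-biased, so when two RootFields values are merged the left operand wins on a root-field name collision, e.g.

    Map.union (Map.fromList [("user", 1 :: Int)]) (Map.fromList [("user", 2)])
      == Map.fromList [("user", 1)]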
mkGCtx :: TyAgg -> RootFlds -> InsCtxMap -> GCtx
mkGCtx tyAgg (RootFlds flds) insCtxMap =
mkGCtx :: TyAgg -> RootFields -> InsCtxMap -> GCtx
mkGCtx tyAgg (RootFields queryFields mutationFields) insCtxMap =
let queryRoot = mkQueryRootTyInfo qFlds
scalarTys = map (TIScalar . mkHsraScalarTyInfo) (Set.toList allScalarTypes)
compTys = map (TIInpObj . mkCompExpInp) (Set.toList allComparableTypes)
@ -642,8 +662,8 @@ mkGCtx tyAgg (RootFlds flds) insCtxMap =
] <>
scalarTys <> compTys <> defaultTypes <> wiredInGeoInputTypes
-- for now subscription root is query root
in GCtx allTys fldInfos ordByEnums queryRoot mutRootM subRootM
(Map.map fst flds) insCtxMap
in GCtx allTys fldInfos queryRoot mutRootM subRootM ordByEnums
(Map.map fst queryFields) (Map.map fst mutationFields) insCtxMap
where
TyAgg tyInfos fldInfos scalars ordByEnums = tyAgg
colTys = Set.fromList $ map pgiType $ lefts $ Map.elems fldInfos
@ -655,15 +675,18 @@ mkGCtx tyAgg (RootFlds flds) insCtxMap =
mkHsraObjTyInfo (Just "subscription root")
(G.NamedType "subscription_root") Set.empty . mapFromL _fiName
subRootM = bool (Just $ mkSubRoot qFlds) Nothing $ null qFlds
(qFlds, mFlds) = partitionEithers $ map snd $ Map.elems flds
anyGeoTypes = any isGeoType colTys
qFlds = rootFieldInfos queryFields
mFlds = rootFieldInfos mutationFields
rootFieldInfos = map snd . Map.elems
anyGeoTypes = any (isScalarColumnWhere isGeoType) colTys
allComparableTypes =
if anyGeoTypes
-- due to casting, we need to generate both geometry and geography
-- operations even if just one of the two appears in the schema
then Set.union (Set.fromList [PGGeometry, PGGeography]) colTys
then Set.union (Set.fromList [PGColumnScalar PGGeometry, PGColumnScalar PGGeography]) colTys
else colTys
allScalarTypes = allComparableTypes <> scalars
allScalarTypes = (allComparableTypes ^.. folded._PGColumnScalar) <> scalars
wiredInGeoInputTypes = guard anyGeoTypes *> map TIInpObj geoInputTypes

View File

@ -6,7 +6,6 @@ module Hasura.GraphQL.Schema.BoolExp
, mkBoolExpInp
) where
import qualified Data.Text as T
import qualified Data.HashMap.Strict as Map
import qualified Language.GraphQL.Draft.Syntax as G
@ -16,19 +15,14 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types
mkCompExpTy :: PGColType -> G.NamedType
mkCompExpTy =
G.NamedType . mkCompExpName
typeToDescription :: G.NamedType -> G.Description
typeToDescription = G.Description . G.unName . G.unNamedType
mkCompExpName :: PGColType -> G.Name
mkCompExpName pgColTy =
G.Name $ T.pack (show pgColTy) <> "_comparison_exp"
mkCompExpTy :: PGColumnType -> G.NamedType
mkCompExpTy = addTypeSuffix "_comparison_exp" . mkColumnType
mkCastExpName :: PGColType -> G.Name
mkCastExpName pgColTy = G.Name $ T.pack (show pgColTy) <> "_cast_exp"
mkCastExpTy :: PGColType -> G.NamedType
mkCastExpTy = G.NamedType . mkCastExpName
mkCastExpTy :: PGColumnType -> G.NamedType
mkCastExpTy = addTypeSuffix "_cast_exp" . mkColumnType
-- TODO(shahidhk) this should ideally be st_d_within_geometry
{-
@ -53,50 +47,46 @@ stDWithinGeographyInpTy = G.NamedType "st_d_within_geography_input"
-- | Makes an input type declaration for the @_cast@ field of a comparison expression.
-- (Currently only used for casting between geometry and geography types.)
mkCastExpressionInputType :: PGColType -> [PGColType] -> InpObjTyInfo
mkCastExpressionInputType :: PGColumnType -> [PGColumnType] -> InpObjTyInfo
mkCastExpressionInputType sourceType targetTypes =
mkHsraInpTyInfo (Just description) (mkCastExpTy sourceType) (fromInpValL targetFields)
where
description = mconcat
[ "Expression to compare the result of casting a column of type "
, G.Description (T.pack $ show sourceType)
, typeToDescription $ mkColumnType sourceType
, ". Multiple cast targets are combined with logical 'AND'."
]
targetFields = map targetField targetTypes
targetField targetType = InpValInfo
Nothing
(G.Name . T.pack $ show targetType)
(G.unNamedType $ mkColumnType targetType)
Nothing
(G.toGT $ mkCompExpTy targetType)
--- | make compare expression input type
mkCompExpInp :: PGColType -> InpObjTyInfo
mkCompExpInp :: PGColumnType -> InpObjTyInfo
mkCompExpInp colTy =
InpObjTyInfo (Just tyDesc) (mkCompExpTy colTy) (fromInpValL $ concat
[ map (mk colScalarTy) typedOps
, map (mk $ G.toLT $ G.toNT colScalarTy) listOps
, bool [] (map (mk $ mkScalarTy PGText) stringOps) isStringTy
, bool [] (map jsonbOpToInpVal jsonbOps) isJsonbTy
, bool [] (stDWithinGeoOpInpVal stDWithinGeometryInpTy :
map geoOpToInpVal (geoOps ++ geomOps)) isGeometryType
, bool [] (stDWithinGeoOpInpVal stDWithinGeographyInpTy :
map geoOpToInpVal geoOps) isGeographyType
[ map (mk colGqlType) typedOps
, map (mk $ G.toLT $ G.toNT colGqlType) listOps
, guard (isScalarWhere isStringType) *> map (mk $ mkScalarTy PGText) stringOps
, guard (isScalarWhere (== PGJSONB)) *> map jsonbOpToInpVal jsonbOps
, guard (isScalarWhere (== PGGeometry)) *>
(stDWithinGeoOpInpVal stDWithinGeometryInpTy : map geoOpToInpVal (geoOps ++ geomOps))
, guard (isScalarWhere (== PGGeography)) *>
(stDWithinGeoOpInpVal stDWithinGeographyInpTy : map geoOpToInpVal geoOps)
, [InpValInfo Nothing "_is_null" Nothing $ G.TypeNamed (G.Nullability True) $ G.NamedType "Boolean"]
, maybeToList castOpInputValue
, castOpInputValues
]) TLHasuraType
where
tyDesc = mconcat
[ "expression to compare columns of type "
, G.Description (T.pack $ show colTy)
, ". All fields are combined with logical 'AND'."
]
isStringTy = case colTy of
PGVarchar -> True
PGText -> True
_ -> False
colGqlType = mkColumnType colTy
colTyDesc = typeToDescription colGqlType
tyDesc =
"expression to compare columns of type " <> colTyDesc
<> ". All fields are combined with logical 'AND'."
isScalarWhere = flip isScalarColumnWhere colTy
mk t n = InpValInfo Nothing n Nothing $ G.toGT t
colScalarTy = mkScalarTy colTy
-- colScalarListTy = GA.GTList colGTy
typedOps =
["_eq", "_neq", "_gt", "_lt", "_gte", "_lte"]
listOps =
@ -109,10 +99,7 @@ mkCompExpInp colTy =
, "_similar", "_nsimilar"
]
isJsonbTy = case colTy of
PGJSONB -> True
_ -> False
jsonbOpToInpVal (op, ty, desc) = InpValInfo (Just desc) op Nothing ty
jsonbOpToInpVal (opName, ty, desc) = InpValInfo (Just desc) opName Nothing ty
jsonbOps =
[ ( "_contains"
, G.toGT $ mkScalarTy PGJSONB
@ -136,9 +123,9 @@ mkCompExpInp colTy =
)
]
castOpInputValue =
castOpInputValues =
-- currently, only geometry/geography types support casting
guard (isGeoType colTy) $>
guard (isScalarWhere isGeoType) $>
InpValInfo Nothing "_cast" Nothing (G.toGT $ mkCastExpTy colTy)
stDWithinGeoOpInpVal ty =
@ -146,20 +133,8 @@ mkCompExpInp colTy =
stDWithinGeoDesc =
"is the column within a distance from a " <> colTyDesc <> " value"
-- Geometry related ops
isGeometryType = case colTy of
PGGeometry -> True
_ -> False
-- Geography related ops
isGeographyType = case colTy of
PGGeography -> True
_ -> False
geoOpToInpVal (op, desc) =
InpValInfo (Just desc) op Nothing $ G.toGT $ mkScalarTy colTy
colTyDesc = G.Description $ T.pack $ show colTy
geoOpToInpVal (opName, desc) =
InpValInfo (Just desc) opName Nothing $ G.toGT colGqlType
-- operators applicable only to geometry types
geomOps :: [(G.Name, G.Description)]
@ -197,12 +172,10 @@ geoInputTypes :: [InpObjTyInfo]
geoInputTypes =
[ stDWithinGeometryInputType
, stDWithinGeographyInputType
, castGeometryInputType
, castGeographyInputType
, mkCastExpressionInputType (PGColumnScalar PGGeometry) [PGColumnScalar PGGeography]
, mkCastExpressionInputType (PGColumnScalar PGGeography) [PGColumnScalar PGGeometry]
]
where
stDWithinGeometryInputType =
mkHsraInpTyInfo Nothing stDWithinGeometryInpTy $ fromInpValL
[ InpValInfo Nothing "from" Nothing $ G.toGT $ G.toNT $ mkScalarTy PGGeometry
@ -216,9 +189,6 @@ geoInputTypes =
Nothing "use_spheroid" (Just $ G.VCBoolean True) $ G.toGT $ mkScalarTy PGBoolean
]
castGeometryInputType = mkCastExpressionInputType PGGeometry [PGGeography]
castGeographyInputType = mkCastExpressionInputType PGGeography [PGGeometry]
mkBoolExpName :: QualifiedTable -> G.Name
mkBoolExpName tn =
qualObjectToName tn <> "_bool_exp"
@ -258,7 +228,7 @@ mkBoolExpInp tn fields =
]
mkFldExpInp = \case
Left (PGColInfo colName colTy _) ->
Left (PGColumnInfo colName colTy _) ->
mk (mkColName colName) (mkCompExpTy colTy)
Right (RelInfo relName _ _ remTab _, _, _, _, _) ->
mk (mkRelName relName) (mkBoolExpTy remTab)
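
An aside on the guard-based construction used in mkCompExpInp above (editorial, not part of this commit): in the list Alternative, guard keeps or drops an entire group of operator fields at once.

    guard True  *> ["_eq", "_neq"]  -- ["_eq", "_neq"]
    guard False *> ["_eq", "_neq"]  -- []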

View File

@ -1,14 +1,17 @@
module Hasura.GraphQL.Schema.Common
( qualObjectToName
, addTypeSuffix
, fromInpValL
, mkColName
, mkColumnType
, mkRelName
, mkAggRelName
, SelField
, mkTableTy
, mkTableEnumType
, mkTableAggTy
, mkColumnEnumVal
@ -22,12 +25,14 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types
type SelField =
Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool)
type SelField = Either PGColumnInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool)
qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name
qualObjectToName = G.Name . snakeCaseQualObject
addTypeSuffix :: Text -> G.NamedType -> G.NamedType
addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name suffix
fromInpValL :: [InpValInfo] -> Map.HashMap G.Name InpValInfo
fromInpValL = mapFromL _iviName
@ -40,13 +45,19 @@ mkAggRelName rn = G.Name $ relNameToTxt rn <> "_aggregate"
mkColName :: PGCol -> G.Name
mkColName (PGCol n) = G.Name n
mkColumnType :: PGColumnType -> G.NamedType
mkColumnType = \case
PGColumnScalar scalarType -> mkScalarTy scalarType
PGColumnEnumReference (EnumReference enumTable _) -> mkTableEnumType enumTable
mkTableTy :: QualifiedTable -> G.NamedType
mkTableTy =
G.NamedType . qualObjectToName
mkTableTy = G.NamedType . qualObjectToName
mkTableEnumType :: QualifiedTable -> G.NamedType
mkTableEnumType = addTypeSuffix "_enum" . mkTableTy
mkTableAggTy :: QualifiedTable -> G.NamedType
mkTableAggTy tn =
G.NamedType $ qualObjectToName tn <> "_aggregate"
mkTableAggTy = addTypeSuffix "_aggregate" . mkTableTy
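
For illustration (editorial, not part of this commit): addTypeSuffix only appends to the underlying GraphQL name, so a table rendered as user_role gets the enum type user_role_enum and the aggregate type user_role_aggregate.

    addTypeSuffix "_enum"      (G.NamedType (G.Name "user_role"))  -- G.NamedType "user_role_enum"
    addTypeSuffix "_aggregate" (G.NamedType (G.Name "user_role"))  -- G.NamedType "user_role_aggregate"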
-- used for 'distinct_on' in select and upsert's 'update columns'
mkColumnEnumVal :: PGCol -> EnumValInfo

View File

@ -36,7 +36,7 @@ input function_args {
procFuncArgs
:: Seq.Seq FunctionArg
-> (PGColType -> Text -> a) -> [a]
-> (PGScalarType -> Text -> a) -> [a]
procFuncArgs argSeq f =
fst $ foldl mkItem ([], 1::Int) argSeq
where

View File

@ -14,10 +14,10 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types
mkPGColInp :: PGColInfo -> InpValInfo
mkPGColInp (PGColInfo colName colTy _) =
mkPGColInp :: PGColumnInfo -> InpValInfo
mkPGColInp (PGColumnInfo colName colTy _) =
InpValInfo Nothing (G.Name $ getPGColTxt colName) Nothing $
G.toGT $ mkScalarTy colTy
G.toGT $ mkColumnType colTy
-- table_mutation_response
mkMutRespTy :: QualifiedTable -> G.NamedType

View File

@ -105,7 +105,7 @@ input table_insert_input {
-}
mkInsInp
:: QualifiedTable -> [PGColInfo] -> RelationInfoMap -> InpObjTyInfo
:: QualifiedTable -> [PGColumnInfo] -> RelationInfoMap -> InpObjTyInfo
mkInsInp tn insCols relInfoMap =
mkHsraInpTyInfo (Just desc) (mkInsInpTy tn) $ fromInpValL $
map mkPGColInp insCols <> relInps
@ -177,11 +177,11 @@ mkInsMutFld tn isUpsertable =
onConflictArg =
InpValInfo (Just onConflictDesc) "on_conflict" Nothing $ G.toGT $ mkOnConflictInpTy tn
mkConstriantTy :: QualifiedTable -> [ConstraintName] -> EnumTyInfo
mkConstriantTy tn cons = enumTyInfo
mkConstraintTy :: QualifiedTable -> [ConstraintName] -> EnumTyInfo
mkConstraintTy tn cons = enumTyInfo
where
enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkConstraintInpTy tn) $
mapFromL _eviVal $ map mkConstraintEnumVal cons
EnumValuesSynthetic . mapFromL _eviVal $ map mkConstraintEnumVal cons
desc = G.Description $
"unique or primary key constraints on table " <>> tn
@ -194,15 +194,15 @@ mkUpdColumnTy :: QualifiedTable -> [PGCol] -> EnumTyInfo
mkUpdColumnTy tn cols = enumTyInfo
where
enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkUpdColumnInpTy tn) $
mapFromL _eviVal $ map mkColumnEnumVal cols
EnumValuesSynthetic . mapFromL _eviVal $ map mkColumnEnumVal cols
desc = G.Description $
"update columns of table " <>> tn
mkConflictActionTy :: Bool -> EnumTyInfo
mkConflictActionTy updAllowed =
mkHsraEnumTyInfo (Just desc) conflictActionTy $ mapFromL _eviVal $
[enumValIgnore] <> bool [] [enumValUpdate] updAllowed
mkHsraEnumTyInfo (Just desc) conflictActionTy $
EnumValuesSynthetic . mapFromL _eviVal $ [enumValIgnore] <> bool [] [enumValUpdate] updAllowed
where
desc = G.Description "conflict action"
enumValIgnore = EnumValInfo (Just "ignore the insert on this row")
@ -216,7 +216,7 @@ mkOnConflictTypes tn uniqueOrPrimaryCons cols =
bool [] tyInfos
where
tyInfos = [ TIEnum $ mkConflictActionTy isUpdAllowed
, TIEnum $ mkConstriantTy tn uniqueOrPrimaryCons
, TIEnum $ mkConstraintTy tn uniqueOrPrimaryCons
, TIEnum $ mkUpdColumnTy tn cols
, TIInpObj $ mkOnConflictInp tn
]

View File

@ -30,7 +30,7 @@ input table_set_input {
}
-}
mkUpdSetInp
:: QualifiedTable -> [PGColInfo] -> InpObjTyInfo
:: QualifiedTable -> [PGColumnInfo] -> InpObjTyInfo
mkUpdSetInp tn cols =
mkHsraInpTyInfo (Just desc) (mkUpdSetTy tn) $
fromInpValL $ map mkPGColInp cols
@ -53,7 +53,7 @@ input table_inc_input {
-}
mkUpdIncInp
:: QualifiedTable -> Maybe [PGColInfo] -> Maybe InpObjTyInfo
:: QualifiedTable -> Maybe [PGColumnInfo] -> Maybe InpObjTyInfo
mkUpdIncInp tn = maybe Nothing mkType
where
mkType cols = let intCols = onlyIntCols cols
@ -141,7 +141,7 @@ deleteAtPathDesc = "delete the field or element with specified path"
<> " (for JSON arrays, negative integers count from the end)"
mkUpdJSONOpInp
:: QualifiedTable -> [PGColInfo] -> [InpObjTyInfo]
:: QualifiedTable -> [PGColumnInfo] -> [InpObjTyInfo]
mkUpdJSONOpInp tn cols = bool inpObjs [] $ null jsonbCols
where
jsonbCols = onlyJSONBCols cols
@ -191,7 +191,7 @@ update_table(
-}
mkIncInpVal :: QualifiedTable -> [PGColInfo] -> Maybe InpValInfo
mkIncInpVal :: QualifiedTable -> [PGColumnInfo] -> Maybe InpValInfo
mkIncInpVal tn cols = bool (Just incArg) Nothing $ null intCols
where
intCols = onlyIntCols cols
@ -199,7 +199,7 @@ mkIncInpVal tn cols = bool (Just incArg) Nothing $ null intCols
incArg =
InpValInfo (Just incArgDesc) "_inc" Nothing $ G.toGT $ mkUpdIncTy tn
mkJSONOpInpVals :: QualifiedTable -> [PGColInfo] -> [InpValInfo]
mkJSONOpInpVals :: QualifiedTable -> [PGColumnInfo] -> [InpValInfo]
mkJSONOpInpVals tn cols = bool jsonbOpArgs [] $ null jsonbCols
where
jsonbCols = onlyJSONBCols cols
@ -224,7 +224,7 @@ mkJSONOpInpVals tn cols = bool jsonbOpArgs [] $ null jsonbCols
G.toGT $ mkJSONOpTy tn deleteAtPathOp
mkUpdMutFld
:: QualifiedTable -> [PGColInfo] -> ObjFldInfo
:: QualifiedTable -> [PGColumnInfo] -> ObjFldInfo
mkUpdMutFld tn cols =
mkHsraObjFldInfo (Just desc) fldName (fromInpValL inputValues) $
G.toGT $ mkMutRespTy tn

View File

@ -21,8 +21,8 @@ ordByTy = G.NamedType "order_by"
ordByEnumTy :: EnumTyInfo
ordByEnumTy =
mkHsraEnumTyInfo (Just desc) ordByTy $ mapFromL _eviVal $
map mkEnumVal enumVals
mkHsraEnumTyInfo (Just desc) ordByTy $
EnumValuesSynthetic . mapFromL _eviVal $ map mkEnumVal enumVals
where
desc = G.Description "column ordering options"
mkEnumVal (n, d) =

View File

@ -28,7 +28,7 @@ mkSelColumnTy :: QualifiedTable -> [PGCol] -> EnumTyInfo
mkSelColumnTy tn cols = enumTyInfo
where
enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkSelColumnInpTy tn) $
mapFromL _eviVal $ map mkColumnEnumVal cols
EnumValuesSynthetic . mapFromL _eviVal $ map mkColumnEnumVal cols
desc = G.Description $
"select columns of table " <>> tn
@ -39,8 +39,7 @@ mkSelColumnInpTy tn =
G.NamedType $ qualObjectToName tn <> "_select_column"
mkTableAggFldsTy :: QualifiedTable -> G.NamedType
mkTableAggFldsTy tn =
G.NamedType $ qualObjectToName tn <> "_aggregate_fields"
mkTableAggFldsTy = addTypeSuffix "_aggregate_fields" . mkTableTy
mkTableColAggFldsTy :: G.Name -> QualifiedTable -> G.NamedType
mkTableColAggFldsTy op tn =
@ -50,27 +49,23 @@ mkTableByPkName :: QualifiedTable -> G.Name
mkTableByPkName tn = qualObjectToName tn <> "_by_pk"
-- Support argument params for PG columns
mkPGColParams :: PGColType -> ParamMap
mkPGColParams = \case
PGJSONB -> jsonParams
PGJSON -> jsonParams
_ -> Map.empty
where
pathDesc = "JSON select path"
jsonParams = Map.fromList
[ (G.Name "path", InpValInfo (Just pathDesc) "path" Nothing $
G.toGT $ mkScalarTy PGText)
]
mkPGColParams :: PGColumnType -> ParamMap
mkPGColParams colType
| isScalarColumnWhere isJSONType colType =
let pathDesc = "JSON select path"
in Map.fromList
[ (G.Name "path", InpValInfo (Just pathDesc) "path" Nothing $ G.toGT $ mkScalarTy PGText) ]
| otherwise = Map.empty
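
A rough illustration (editorial, not part of this commit, assuming isJSONType matches the json/jsonb scalars): only JSON-ish columns receive the extra "path" argument.

    mkPGColParams (PGColumnScalar PGJSONB)    -- a map with the single "path" input value
    mkPGColParams (PGColumnScalar PGInteger)  -- Map.empty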
mkPGColFld :: PGColInfo -> ObjFldInfo
mkPGColFld (PGColInfo colName colTy isNullable) =
mkPGColFld :: PGColumnInfo -> ObjFldInfo
mkPGColFld (PGColumnInfo colName colTy isNullable) =
mkHsraObjFldInfo Nothing n (mkPGColParams colTy) ty
where
n = G.Name $ getPGColTxt colName
ty = bool notNullTy nullTy isNullable
scalarTy = mkScalarTy colTy
notNullTy = G.toGT $ G.toNT scalarTy
nullTy = G.toGT scalarTy
columnType = mkColumnType colTy
notNullTy = G.toGT $ G.toNT columnType
nullTy = G.toGT columnType
-- where: table_bool_exp
-- limit: Int
@ -222,8 +217,8 @@ type table_<agg-op>_fields{
mkTableColAggFldsObj
:: QualifiedTable
-> G.Name
-> (PGColType -> G.NamedType)
-> [PGColInfo]
-> (PGColumnType -> G.NamedType)
-> [PGColumnInfo]
-> ObjTyInfo
mkTableColAggFldsObj tn op f cols =
mkHsraObjTyInfo (Just desc) (mkTableColAggFldsTy op tn) Set.empty $ mapFromL _fiName $
@ -263,7 +258,7 @@ table_by_pk(
): table
-}
mkSelFldPKey
:: QualifiedTable -> [PGColInfo]
:: QualifiedTable -> [PGColumnInfo]
-> ObjFldInfo
mkSelFldPKey tn cols =
mkHsraObjFldInfo (Just desc) fldName args ty
@ -273,8 +268,8 @@ mkSelFldPKey tn cols =
fldName = mkTableByPkName tn
args = fromInpValL $ map colInpVal cols
ty = G.toGT $ mkTableTy tn
colInpVal (PGColInfo n typ _) =
InpValInfo Nothing (mkColName n) Nothing $ G.toGT $ G.toNT $ mkScalarTy typ
colInpVal (PGColumnInfo n typ _) =
InpValInfo Nothing (mkColName n) Nothing $ G.toGT $ G.toNT $ mkColumnType typ
{-

View File

@ -33,9 +33,8 @@ import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.RQL.Types.QueryCollection
import Hasura.SQL.Types (PGColType)
import Hasura.SQL.Value (PGColValue,
parsePGValue)
import Hasura.SQL.Types (WithScalarType)
import Hasura.SQL.Value (PGScalarValue)
data QueryParts
= QueryParts
@ -118,8 +117,8 @@ getAnnVarVals varDefsL inpVals = withPathK "variableValues" $ do
showVars :: (Functor f, Foldable f) => f G.Variable -> Text
showVars = showNames . fmap G.unVariable
type VarPGTypes = Map.HashMap G.Variable PGColType
type AnnPGVarVals = Map.HashMap G.Variable (PGColType, PGColValue)
type VarPGTypes = Map.HashMap G.Variable PGColumnType
type AnnPGVarVals = Map.HashMap G.Variable (WithScalarType PGScalarValue)
-- this is in similar spirit to getAnnVarVals, however
-- here it is much simpler and can get rid of typemap requirement
@ -142,7 +141,7 @@ getAnnPGVarVals varTypes varValsM =
-- TODO: we don't have the graphql type
-- " of type: " <> T.pack (show varType) <>
" in variableValues"
(varType,) <$> runAesonParser (parsePGValue varType) varVal
parsePGScalarValue varType varVal
where
varVals = fromMaybe Map.empty varValsM

View File

@ -26,7 +26,7 @@ import Hasura.SQL.Value
-- data ScalarInfo
-- = SIBuiltin !GBuiltin
-- | SICustom !PGColType
-- | SICustom !PGScalarType
-- deriving (Show, Eq)
-- data GBuiltin

View File

@ -19,6 +19,8 @@ import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Hasura.RQL.Types as RQL
import Hasura.GraphQL.Utils
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.Types
@ -249,21 +251,27 @@ validateNamedTypeVal inpValParser (nullability, nt) val = do
fmap (AGObject nt) . mapM (validateObject inpValParser ioti)
TIEnum eti ->
withParsed gType (getEnum inpValParser) val $
fmap (AGEnum nt) . mapM (validateEnum eti)
fmap (AGEnum nt) . validateEnum eti
TIScalar (ScalarTyInfo _ pgColTy _) ->
withParsed gType (getScalar inpValParser) val $
fmap (AGScalar pgColTy) . mapM (validateScalar pgColTy)
where
throwUnexpTypeErr ty = throw500 $ "unexpected " <> ty <> " type info for: "
<> showNamedTy nt
validateEnum enumTyInfo enumVal =
if Map.member enumVal (_etiValues enumTyInfo)
then return enumVal
else throwVE $ "unexpected value " <>
showName (G.unEnumValue enumVal) <>
" for enum: " <> showNamedTy nt
validateScalar pgColTy =
runAesonParser (parsePGValue pgColTy)
validateEnum enumTyInfo maybeEnumValue = case (_etiValues enumTyInfo, maybeEnumValue) of
(EnumValuesSynthetic _, Nothing) -> pure $ AGESynthetic Nothing
(EnumValuesReference reference, Nothing) -> pure $ AGEReference reference Nothing
(EnumValuesSynthetic values, Just enumValue)
| Map.member enumValue values -> pure $ AGESynthetic (Just enumValue)
(EnumValuesReference reference@(EnumReference _ values), Just enumValue)
| rqlEnumValue <- RQL.EnumValue . G.unName $ G.unEnumValue enumValue
, Map.member rqlEnumValue values
-> pure $ AGEReference reference (Just rqlEnumValue)
(_, Just enumValue) -> throwVE $
"unexpected value " <> showName (G.unEnumValue enumValue) <> " for enum: " <> showNamedTy nt
validateScalar pgColTy = runAesonParser (parsePGValue pgColTy)
gType = G.TypeNamed nullability nt
validateList

View File

@ -21,6 +21,8 @@ module Hasura.GraphQL.Validate.Types
, EnumTyInfo(..)
, mkHsraEnumTyInfo
, EnumValuesInfo(..)
, normalizeEnumValues
, EnumValInfo(..)
, InpObjFldMap
, InpObjTyInfo(..)
@ -52,6 +54,7 @@ module Hasura.GraphQL.Validate.Types
, TypeLoc (..)
, typeEq
, AnnGValue(..)
, AnnGEnumValue(..)
, AnnGObject
, hasNullVal
, getAnnInpValKind
@ -60,7 +63,6 @@ module Hasura.GraphQL.Validate.Types
) where
import Hasura.Prelude
import Instances.TH.Lift ()
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
@ -73,14 +75,15 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified Language.GraphQL.Draft.TH as G
import qualified Language.Haskell.TH.Syntax as TH
import qualified Hasura.RQL.Types.Column as RQL
import Hasura.GraphQL.Utils
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.RemoteSchema
import Hasura.SQL.Types
import Hasura.SQL.Value
-- | Typeclass for equating relevant properties of various GraphQL types
-- | defined below
-- | Typeclass for equating relevant properties of various GraphQL types defined below
class EquatableGType a where
type EqProps a
getEqProps :: a -> EqProps a
@ -99,21 +102,39 @@ fromEnumValDef :: G.EnumValueDefinition -> EnumValInfo
fromEnumValDef (G.EnumValueDefinition descM val _) =
EnumValInfo descM val False
data EnumValuesInfo
= EnumValuesSynthetic !(Map.HashMap G.EnumValue EnumValInfo)
-- ^ Values for an enum that exists only in the GraphQL schema and does not have any external
-- source of truth.
| EnumValuesReference !RQL.EnumReference
-- ^ Values for an enum that is backed by an enum table reference (see "Hasura.RQL.Schema.Enum").
deriving (Show, Eq, TH.Lift)
normalizeEnumValues :: EnumValuesInfo -> Map.HashMap G.EnumValue EnumValInfo
normalizeEnumValues = \case
EnumValuesSynthetic values -> values
EnumValuesReference (RQL.EnumReference _ values) ->
mapFromL _eviVal . flip map (Map.toList values) $
\(RQL.EnumValue name, RQL.EnumValueInfo maybeDescription) -> EnumValInfo
{ _eviVal = G.EnumValue $ G.Name name
, _eviDesc = G.Description <$> maybeDescription
, _eviIsDeprecated = False }
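
A minimal sketch (editorial, not part of this commit; userRoleTable below is a hypothetical QualifiedTable) of how a reference to an enum table is flattened into the synthetic value map the validator already understands:

    normalizeEnumValues
      (EnumValuesReference (RQL.EnumReference userRoleTable
        (Map.singleton (RQL.EnumValue "admin") (RQL.EnumValueInfo Nothing))))
    -- yields one entry: EnumValInfo { _eviVal = G.EnumValue (G.Name "admin")
    --                               , _eviDesc = Nothing, _eviIsDeprecated = False }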
data EnumTyInfo
= EnumTyInfo
{ _etiDesc :: !(Maybe G.Description)
, _etiName :: !G.NamedType
, _etiValues :: !(Map.HashMap G.EnumValue EnumValInfo)
, _etiValues :: !EnumValuesInfo
, _etiLoc :: !TypeLoc
} deriving (Show, Eq, TH.Lift)
instance EquatableGType EnumTyInfo where
type EqProps EnumTyInfo = (G.NamedType, Map.HashMap G.EnumValue EnumValInfo)
getEqProps ety = (,) (_etiName ety) (_etiValues ety)
getEqProps ety = (,) (_etiName ety) (normalizeEnumValues $ _etiValues ety)
fromEnumTyDef :: G.EnumTypeDefinition -> TypeLoc -> EnumTyInfo
fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) loc =
EnumTyInfo descM (G.NamedType n) enumVals loc
EnumTyInfo descM (G.NamedType n) (EnumValuesSynthetic enumVals) loc
where
enumVals = Map.fromList
[(G._evdName valDef, fromEnumValDef valDef) | valDef <- valDefs]
@ -121,7 +142,7 @@ fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) loc =
mkHsraEnumTyInfo
:: Maybe G.Description
-> G.NamedType
-> Map.HashMap G.EnumValue EnumValInfo
-> EnumValuesInfo
-> EnumTyInfo
mkHsraEnumTyInfo descM ty enumVals =
EnumTyInfo descM ty enumVals TLHasuraType
@ -324,15 +345,15 @@ mkHsraInpTyInfo descM ty flds =
data ScalarTyInfo
= ScalarTyInfo
{ _stiDesc :: !(Maybe G.Description)
, _stiType :: !PGColType
, _stiType :: !PGScalarType
, _stiLoc :: !TypeLoc
} deriving (Show, Eq, TH.Lift)
mkHsraScalarTyInfo :: PGColType -> ScalarTyInfo
mkHsraScalarTyInfo :: PGScalarType -> ScalarTyInfo
mkHsraScalarTyInfo ty = ScalarTyInfo Nothing ty TLHasuraType
instance EquatableGType ScalarTyInfo where
type EqProps ScalarTyInfo = PGColType
type EqProps ScalarTyInfo = PGScalarType
getEqProps = _stiType
fromScalarTyDef
@ -546,16 +567,16 @@ isSubTypeBase subTyInfo supTyInfo = case (subTyInfo,supTyInfo) of
notSubTyErr = throwError $ "Type " <> showTy subTyInfo <> " is not a sub type of " <> showTy supTyInfo
-- map postgres types to builtin scalars
pgColTyToScalar :: PGColType -> Text
pgColTyToScalar :: PGScalarType -> Text
pgColTyToScalar = \case
PGInteger -> "Int"
PGBoolean -> "Boolean"
PGFloat -> "Float"
PGText -> "String"
PGVarchar -> "String"
t -> T.pack $ show t
t -> toSQLTxt t
mkScalarTy :: PGColType -> G.NamedType
mkScalarTy :: PGScalarType -> G.NamedType
mkScalarTy =
G.NamedType . G.Name . pgColTyToScalar
@ -659,9 +680,15 @@ data AnnInpVal
type AnnGObject = OMap.InsOrdHashMap G.Name AnnInpVal
-- | See 'EnumValuesInfo' for information about what these cases mean.
data AnnGEnumValue
= AGESynthetic !(Maybe G.EnumValue)
| AGEReference !RQL.EnumReference !(Maybe RQL.EnumValue)
deriving (Show, Eq)
data AnnGValue
= AGScalar !PGColType !(Maybe PGColValue)
| AGEnum !G.NamedType !(Maybe G.EnumValue)
= AGScalar !PGScalarType !(Maybe PGScalarValue)
| AGEnum !G.NamedType !AnnGEnumValue
| AGObject !G.NamedType !(Maybe AnnGObject)
| AGArray !G.ListType !(Maybe [AnnInpVal])
deriving (Show, Eq)
@ -678,11 +705,12 @@ instance J.ToJSON AnnGValue where
hasNullVal :: AnnGValue -> Bool
hasNullVal = \case
AGScalar _ Nothing -> True
AGEnum _ Nothing -> True
AGObject _ Nothing -> True
AGArray _ Nothing -> True
_ -> False
AGScalar _ Nothing -> True
AGEnum _ (AGESynthetic Nothing) -> True
AGEnum _ (AGEReference _ Nothing) -> True
AGObject _ Nothing -> True
AGArray _ Nothing -> True
_ -> False
getAnnInpValKind :: AnnGValue -> Text
getAnnInpValKind = \case

View File

@ -3,11 +3,12 @@ module Hasura.Prelude
, onNothing
, onJust
, onLeft
, choice
, bsToTxt
, txtToBs
) where
import Control.Applicative as M ((<|>))
import Control.Applicative as M (Alternative (..))
import Control.Monad as M (void, when)
import Control.Monad.Base as M
import Control.Monad.Except as M
@ -19,7 +20,9 @@ import Data.Bool as M (bool)
import Data.Data as M (Data (..))
import Data.Either as M (lefts, partitionEithers,
rights)
import Data.Foldable as M (foldrM, toList)
import Data.Foldable as M (foldrM, for_, toList,
traverse_)
import Data.Function as M (on, (&))
import Data.Functor as M (($>), (<&>))
import Data.Hashable as M (Hashable)
import Data.List as M (find, foldl', group,
@ -33,6 +36,7 @@ import Data.Ord as M (comparing)
import Data.Semigroup as M (Semigroup (..))
import Data.String as M (IsString)
import Data.Text as M (Text)
import Data.Traversable as M (for)
import Data.Word as M (Word64)
import GHC.Generics as M (Generic)
import Prelude as M hiding (fail, init, lookup)
@ -51,6 +55,9 @@ onJust m action = maybe (return ()) action m
onLeft :: (Monad m) => Either e a -> (e -> m a) -> m a
onLeft e f = either f return e
choice :: (Alternative f) => [f a] -> f a
choice = foldr (<|>) empty
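
For example (editorial, not part of this commit), choice returns the first successful alternative and falls back to empty:

    choice [Nothing, Just 1, Just 2]  -- Just 1
    choice ([] :: [Maybe Int])        -- Nothing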
bsToTxt :: B.ByteString -> Text
bsToTxt = TE.decodeUtf8With TE.lenientDecode

View File

@ -55,7 +55,7 @@ getTriggerSql
:: Ops
-> TriggerName
-> QualifiedTable
-> [PGColInfo]
-> [PGColumnInfo]
-> Bool
-> SubscribeOpSpec
-> Maybe T.Text
@ -118,7 +118,7 @@ getTriggerSql op trn qt allCols strfyNum spec =
mkAllTriggersQ
:: TriggerName
-> QualifiedTable
-> [PGColInfo]
-> [PGColumnInfo]
-> Bool
-> TriggerOpsDef
-> Q.TxE QErr ()
@ -133,7 +133,7 @@ mkAllTriggersQ trn qt allCols strfyNum fullspec = do
mkTriggerQ
:: TriggerName
-> QualifiedTable
-> [PGColInfo]
-> [PGColumnInfo]
-> Bool
-> Ops
-> SubscribeOpSpec
@ -151,7 +151,7 @@ delTriggerQ trn = mapM_ (\op -> Q.unitQE
addEventTriggerToCatalog
:: QualifiedTable
-> [PGColInfo]
-> [PGColumnInfo]
-> Bool
-> EventTriggerConf
-> Q.TxE QErr ()
@ -179,7 +179,7 @@ delEventTriggerFromCatalog trn = do
updateEventTriggerToCatalog
:: QualifiedTable
-> [PGColInfo]
-> [PGColumnInfo]
-> Bool
-> EventTriggerConf
-> Q.TxE QErr ()
@ -228,7 +228,7 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re
-- can only replace for same table
when replace $ do
ti' <- askTabInfoFromTrigger name
when (tiName ti' /= tiName ti) $ throw400 NotSupported "cannot replace table or schema for trigger"
when (_tiName ti' /= _tiName ti) $ throw400 NotSupported "cannot replace table or schema for trigger"
assertCols ti insert
assertCols ti update
@ -242,7 +242,7 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re
let cols = sosColumns sos
case cols of
SubCStar -> return ()
SubCArray pgcols -> forM_ pgcols (assertPGCol (tiFieldInfoMap ti) "")
SubCArray pgcols -> forM_ pgcols (assertPGCol (_tiFieldInfoMap ti) "")
--(QErrM m, CacheRWM m, MonadTx m, MonadIO m)
@ -285,7 +285,7 @@ subTableP2
:: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasSQLGenCtx m)
=> QualifiedTable -> Bool -> EventTriggerConf -> m ()
subTableP2 qt replace etc = do
allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt
strfyNum <- stringifyNum <$> askSQLGenCtx
if replace
then do
@ -309,7 +309,7 @@ unsubTableP1
unsubTableP1 (DeleteEventTriggerQuery name) = do
adminOnly
ti <- askTabInfoFromTrigger name
return $ tiName ti
return $ _tiName ti
unsubTableP2
:: (QErrM m, CacheRWM m, MonadTx m)
@ -363,7 +363,7 @@ runInvokeEventTrigger (InvokeEventTriggerQuery name payload) = do
trigInfo <- askEventTriggerInfo name
assertManual $ etiOpsDef trigInfo
ti <- askTabInfoFromTrigger name
eid <-liftTx $ insertManualEvent (tiName ti) name payload
eid <-liftTx $ insertManualEvent (_tiName ti) name payload
return $ encJFromJValue $ object ["event_id" .= eid]
where
assertManual (TriggerOpsDef _ _ _ man) = case man of

View File

@ -47,14 +47,14 @@ import qualified Hasura.RQL.DDL.Permission.Internal as DP
import qualified Hasura.RQL.DDL.QueryCollection as DQC
import qualified Hasura.RQL.DDL.Relationship as DR
import qualified Hasura.RQL.DDL.RemoteSchema as DRS
import qualified Hasura.RQL.DDL.Schema.Function as DF
import qualified Hasura.RQL.DDL.Schema.Table as DT
import qualified Hasura.RQL.DDL.Schema as DS
import qualified Hasura.RQL.Types.EventTrigger as DTS
import qualified Hasura.RQL.Types.RemoteSchema as TRS
data TableMeta
= TableMeta
{ _tmTable :: !QualifiedTable
, _tmIsEnum :: !Bool
, _tmObjectRelationships :: ![DR.ObjRelDef]
, _tmArrayRelationships :: ![DR.ArrRelDef]
, _tmInsertPermissions :: ![DP.InsPermDef]
@ -64,9 +64,9 @@ data TableMeta
, _tmEventTriggers :: ![DTS.EventTriggerConf]
} deriving (Show, Eq, Lift)
mkTableMeta :: QualifiedTable -> TableMeta
mkTableMeta qt =
TableMeta qt [] [] [] [] [] [] []
mkTableMeta :: QualifiedTable -> Bool -> TableMeta
mkTableMeta qt isEnum =
TableMeta qt isEnum [] [] [] [] [] [] []
makeLenses ''TableMeta
@ -78,6 +78,7 @@ instance FromJSON TableMeta where
TableMeta
<$> o .: tableKey
<*> o .:? isEnumKey .!= False
<*> o .:? orKey .!= []
<*> o .:? arKey .!= []
<*> o .:? ipKey .!= []
@ -88,6 +89,7 @@ instance FromJSON TableMeta where
where
tableKey = "table"
isEnumKey = "is_enum"
orKey = "object_relationships"
arKey = "array_relationships"
ipKey = "insert_permissions"
@ -100,8 +102,8 @@ instance FromJSON TableMeta where
HS.fromList (M.keys o) `HS.difference` expectedKeySet
expectedKeySet =
HS.fromList [ tableKey, orKey, arKey, ipKey
, spKey, upKey, dpKey, etKey
HS.fromList [ tableKey, isEnumKey, orKey, arKey
, ipKey, spKey, upKey, dpKey, etKey
]
parseJSON _ =
@ -136,7 +138,7 @@ runClearMetadata
runClearMetadata _ = do
adminOnly
liftTx clearMetadata
DT.buildSchemaCacheStrict
DS.buildSchemaCacheStrict
return successMsg
data ReplaceMetadata
@ -220,13 +222,16 @@ applyQP2
applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) = do
liftTx clearMetadata
DT.buildSchemaCacheStrict
DS.buildSchemaCacheStrict
withPathK "tables" $ do
-- tables and views
indexedForM_ (map _tmTable tables) $ \tableName ->
void $ DT.trackExistingTableOrViewP2 tableName False
indexedForM_ tables $ \tableMeta -> do
let trackQuery = DS.TrackTable
{ DS.tName = tableMeta ^. tmTable
, DS.tIsEnum = tableMeta ^. tmIsEnum }
void $ DS.trackExistingTableOrViewP2 trackQuery
-- Relationships
indexedForM_ tables $ \table -> do
@ -257,7 +262,7 @@ applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) =
-- sql functions
withPathK "functions" $
indexedMapM_ (void . DF.trackFunctionP2) functions
indexedMapM_ (void . DS.trackFunctionP2) functions
-- query collections
withPathK "query_collections" $
@ -288,7 +293,7 @@ applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) =
processPerms tabInfo perms =
indexedForM_ perms $ \permDef -> do
permInfo <- DP.addPermP1 tabInfo permDef
DP.addPermP2 (tiName tabInfo) permDef permInfo
DP.addPermP2 (_tiName tabInfo) permDef permInfo
runReplaceMetadata
:: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m
@ -311,8 +316,9 @@ $(deriveToJSON defaultOptions ''ExportMetadata)
fetchMetadata :: Q.TxE QErr ReplaceMetadata
fetchMetadata = do
tables <- Q.catchE defaultTxErrorHandler fetchTables
let qts = map (uncurry QualifiedObject) tables
tableMetaMap = M.fromList $ zip qts $ map mkTableMeta qts
let tableMetaMap = M.fromList . flip map tables $ \(schema, name, isEnum) ->
let qualifiedName = QualifiedObject schema name
in (qualifiedName, mkTableMeta qualifiedName isEnum)
-- Fetch all the relationships
relationships <- Q.catchE defaultTxErrorHandler fetchRelationships
@ -384,7 +390,7 @@ fetchMetadata = do
fetchTables =
Q.listQ [Q.sql|
SELECT table_schema, table_name from hdb_catalog.hdb_table
SELECT table_schema, table_name, is_enum from hdb_catalog.hdb_table
WHERE is_system_defined = 'false'
|] () False
@ -437,7 +443,7 @@ runReloadMetadata
=> ReloadMetadata -> m EncJSON
runReloadMetadata _ = do
adminOnly
DT.buildSchemaCache
DS.buildSchemaCache
return successMsg
data DumpInternalState
@ -499,9 +505,8 @@ runDropInconsistentMetadata _ = do
purgeMetadataObj :: MonadTx m => MetadataObjId -> m ()
purgeMetadataObj = liftTx . \case
(MOTable qt) ->
Q.catchE defaultTxErrorHandler $ DT.delTableFromCatalog qt
(MOFunction qf) -> DF.delFunctionFromCatalog qf
(MOTable qt) -> DS.deleteTableFromCatalog qt
(MOFunction qf) -> DS.delFunctionFromCatalog qf
(MORemoteSchema rsn) -> DRS.removeRemoteSchemaFromCatalog rsn
(MOTableObj qt (MTORel rn _)) -> DR.delRelFromCatalog qt rn
(MOTableObj qt (MTOPerm rn pt)) -> DP.dropPermFromCatalog qt rn pt

View File

@ -108,20 +108,20 @@ dropView vn =
procSetObj
:: (QErrM m)
=> TableInfo -> Maybe ColVals
=> TableInfo PGColumnInfo -> Maybe ColVals
-> m (PreSetColsPartial, [Text], [SchemaDependency])
procSetObj ti mObj = do
(setColTups, deps) <- withPathK "set" $
fmap unzip $ forM (HM.toList setObj) $ \(pgCol, val) -> do
ty <- askPGType fieldInfoMap pgCol $
"column " <> pgCol <<> " not found in table " <>> tn
sqlExp <- valueParser (PgTypeSimple ty) val
sqlExp <- valueParser (PGTypeScalar ty) val
let dep = mkColDep (getDepReason sqlExp) tn pgCol
return ((pgCol, sqlExp), dep)
return (HM.fromList setColTups, depHeaders, deps)
where
fieldInfoMap = tiFieldInfoMap ti
tn = tiName ti
fieldInfoMap = _tiFieldInfoMap ti
tn = _tiName ti
setObj = fromMaybe mempty mObj
depHeaders = getDepHeadersFromVal $ Object $
HM.fromList $ map (first getPGColTxt) $ HM.toList setObj
@ -130,7 +130,7 @@ procSetObj ti mObj = do
buildInsPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColumnInfo
-> PermDef InsPerm
-> m (WithDeps InsPermInfo)
buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) =
@ -148,8 +148,8 @@ buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) =
insColsWithoutPresets = insCols \\ HM.keys setColsSQL
return (InsPermInfo (HS.fromList insColsWithoutPresets) vn be setColsSQL reqHdrs, deps)
where
fieldInfoMap = tiFieldInfoMap tabInfo
tn = tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo
tn = _tiName tabInfo
vn = buildViewName tn rn PTInsert
allCols = map pgiName $ getCols fieldInfoMap
insCols = fromMaybe allCols $ convColSpec fieldInfoMap <$> mCols
@ -213,7 +213,7 @@ $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm)
buildSelPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColumnInfo
-> SelPerm
-> m (WithDeps SelPermInfo)
buildSelPermInfo tabInfo sp = do
@ -235,8 +235,8 @@ buildSelPermInfo tabInfo sp = do
return (SelPermInfo (HS.fromList pgCols) tn be mLimit allowAgg depHeaders, deps)
where
tn = tiName tabInfo
fieldInfoMap = tiFieldInfoMap tabInfo
tn = _tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo
allowAgg = or $ spAllowAggregations sp
autoInferredErr = "permissions for relationships are automatically inferred"
@ -283,7 +283,7 @@ type CreateUpdPerm = CreatePerm UpdPerm
buildUpdPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColumnInfo
-> UpdPerm
-> m (WithDeps UpdPermInfo)
buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do
@ -305,8 +305,8 @@ buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do
return (UpdPermInfo (HS.fromList updColsWithoutPreSets) tn be setColsSQL reqHeaders, deps)
where
tn = tiName tabInfo
fieldInfoMap = tiFieldInfoMap tabInfo
tn = _tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo
updCols = convColSpec fieldInfoMap colSpec
relInUpdErr = "relationships can't be used in update"
@ -347,7 +347,7 @@ type CreateDelPerm = CreatePerm DelPerm
buildDelPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColumnInfo
-> DelPerm
-> m (WithDeps DelPermInfo)
buildDelPermInfo tabInfo (DelPerm fltr) = do
@ -357,8 +357,8 @@ buildDelPermInfo tabInfo (DelPerm fltr) = do
depHeaders = getDependentHeaders fltr
return (DelPermInfo tn be depHeaders, deps)
where
tn = tiName tabInfo
fieldInfoMap = tiFieldInfoMap tabInfo
tn = _tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo
type DropDelPerm = DropPerm DelPerm

View File

@ -23,6 +23,7 @@ import Hasura.RQL.GBoolExp
import Hasura.RQL.Types
import Hasura.Server.Utils
import Hasura.SQL.Types
import Hasura.SQL.Value
import qualified Database.PG.Query as Q
@ -39,7 +40,7 @@ instance ToJSON PermColSpec where
toJSON (PCCols cols) = toJSON cols
toJSON PCStar = "*"
convColSpec :: FieldInfoMap -> PermColSpec -> [PGCol]
convColSpec :: FieldInfoMap PGColumnInfo -> PermColSpec -> [PGCol]
convColSpec _ (PCCols cols) = cols
convColSpec cim PCStar = map pgiName $ getCols cim
@ -47,18 +48,18 @@ assertPermNotDefined
:: (MonadError QErr m)
=> RoleName
-> PermAccessor a
-> TableInfo
-> TableInfo PGColumnInfo
-> m ()
assertPermNotDefined roleName pa tableInfo =
when (permissionIsDefined rpi pa || roleName == adminRole)
$ throw400 AlreadyExists $ mconcat
[ "'" <> T.pack (show $ permAccToType pa) <> "'"
, " permission on " <>> tiName tableInfo
, " permission on " <>> _tiName tableInfo
, " for role " <>> roleName
, " already exists"
]
where
rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo
rpi = M.lookup roleName $ _tiRolePermInfoMap tableInfo
permissionIsDefined
:: Maybe RolePermInfo -> PermAccessor a -> Bool
@ -69,21 +70,21 @@ assertPermDefined
:: (MonadError QErr m)
=> RoleName
-> PermAccessor a
-> TableInfo
-> TableInfo PGColumnInfo
-> m ()
assertPermDefined roleName pa tableInfo =
unless (permissionIsDefined rpi pa) $ throw400 PermissionDenied $ mconcat
[ "'" <> T.pack (show $ permAccToType pa) <> "'"
, " permission on " <>> tiName tableInfo
, " permission on " <>> _tiName tableInfo
, " for role " <>> roleName
, " does not exist"
]
where
rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo
rpi = M.lookup roleName $ _tiRolePermInfoMap tableInfo
askPermInfo
:: (MonadError QErr m)
=> TableInfo
=> TableInfo PGColumnInfo
-> RoleName
-> PermAccessor c
-> m c
@ -91,14 +92,14 @@ askPermInfo tabInfo roleName pa =
case M.lookup roleName rpim >>= (^. paL) of
Just c -> return c
Nothing -> throw400 PermissionDenied $ mconcat
[ pt <> " permisison on " <>> tiName tabInfo
[ pt <> " permisison on " <>> _tiName tabInfo
, " for role " <>> roleName
, " does not exist"
]
where
paL = permAccToLens pa
pt = permTypeToCode $ permAccToType pa
rpim = tiRolePermInfoMap tabInfo
rpim = _tiRolePermInfoMap tabInfo
savePermToCatalog
:: (ToJSON a)
@ -174,7 +175,7 @@ data CreatePermP1Res a
procBoolExp
:: (QErrM m, CacheRM m)
=> QualifiedTable -> FieldInfoMap -> BoolExp
=> QualifiedTable -> FieldInfoMap PGColumnInfo -> BoolExp
-> m (AnnBoolExpPartialSQL, [SchemaDependency])
procBoolExp tn fieldInfoMap be = do
abe <- annBoolExp valueParser fieldInfoMap be
@ -204,22 +205,21 @@ getDependentHeaders (BoolExp boolExp) =
valueParser
:: (MonadError QErr m)
=> PgType -> Value -> m PartialSQLExp
=> PGType PGColumnType -> Value -> m PartialSQLExp
valueParser pgType = \case
-- When it is a special variable
String t
| isUserVar t -> return $ PSESessVar pgType t
| isReqUserId t -> return $ PSESessVar pgType userIdHeader
| otherwise -> return $ PSESQLExp $
S.SETyAnn (S.SELit t) $ S.mkTypeAnn pgType
| isUserVar t -> return $ mkTypedSessionVar pgType t
| isReqUserId t -> return $ mkTypedSessionVar pgType userIdHeader
-- Typical value as Aeson's value
val -> case pgType of
PgTypeSimple columnType -> PSESQLExp <$> txtRHSBuilder columnType val
PgTypeArray ofType -> do
PGTypeScalar columnType -> PSESQLExp . toTxtValue <$> parsePGScalarValue columnType val
PGTypeArray ofType -> do
vals <- runAesonParser parseJSON val
arrayExp <- S.SEArray <$> indexedForM vals (txtRHSBuilder ofType)
return $ PSESQLExp $ S.SETyAnn arrayExp $ S.mkTypeAnn pgType
WithScalarType scalarType scalarValues <- parsePGScalarValues ofType vals
return . PSESQLExp $ S.SETyAnn
(S.SEArray $ map (toTxtValue . WithScalarType scalarType) scalarValues)
(S.mkTypeAnn $ PGTypeArray scalarType)
injectDefaults :: QualifiedTable -> QualifiedTable -> Q.Query
injectDefaults qv qt =
@ -258,7 +258,7 @@ class (ToJSON a) => IsPerm a where
buildPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColumnInfo
-> PermDef a
-> m (WithDeps (PermInfo a))
@ -282,7 +282,7 @@ class (ToJSON a) => IsPerm a where
getPermAcc2 _ = permAccessor
validateViewPerm
:: (IsPerm a, QErrM m) => PermDef a -> TableInfo -> m ()
:: (IsPerm a, QErrM m) => PermDef a -> TableInfo PGColumnInfo -> m ()
validateViewPerm permDef tableInfo =
case permAcc of
PASelect -> return ()
@ -290,13 +290,13 @@ validateViewPerm permDef tableInfo =
PAUpdate -> mutableView tn viIsUpdatable viewInfo "updatable"
PADelete -> mutableView tn viIsDeletable viewInfo "deletable"
where
tn = tiName tableInfo
viewInfo = tiViewInfo tableInfo
tn = _tiName tableInfo
viewInfo = _tiViewInfo tableInfo
permAcc = getPermAcc1 permDef
addPermP1
:: (QErrM m, CacheRM m, IsPerm a)
=> TableInfo -> PermDef a -> m (WithDeps (PermInfo a))
=> TableInfo PGColumnInfo -> PermDef a -> m (WithDeps (PermInfo a))
addPermP1 tabInfo pd = do
assertPermNotDefined (pdRole pd) (getPermAcc1 pd) tabInfo
buildPermInfo tabInfo pd

View File

@ -35,14 +35,14 @@ import Instances.TH.Lift ()
validateManualConfig
:: (QErrM m, CacheRM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> RelManualConfig
-> m ()
validateManualConfig fim rm = do
let colMapping = M.toList $ rmColumns rm
remoteQt = rmTable rm
remoteTabInfo <- askTabInfo remoteQt
let remoteFim = tiFieldInfoMap remoteTabInfo
let remoteFim = _tiFieldInfoMap remoteTabInfo
forM_ colMapping $ \(lCol, rCol) -> do
assertPGCol fim "" lCol
assertPGCol remoteFim "" rCol
@ -70,14 +70,14 @@ persistRel (QualifiedObject sn tn) rn relType relDef comment =
checkForFldConfilct
:: (MonadError QErr m)
=> TableInfo
=> TableInfo PGColumnInfo
-> FieldName
-> m ()
checkForFldConfilct tabInfo f =
case HM.lookup f (tiFieldInfoMap tabInfo) of
case HM.lookup f (_tiFieldInfoMap tabInfo) of
Just _ -> throw400 AlreadyExists $ mconcat
[ "column/relationship " <>> f
, " of table " <>> tiName tabInfo
, " of table " <>> _tiName tabInfo
, " already exists"
]
Nothing -> return ()
@ -90,7 +90,7 @@ validateObjRel
validateObjRel qt (RelDef rn ru _) = do
tabInfo <- askTabInfo qt
checkForFldConfilct tabInfo (fromRel rn)
let fim = tiFieldInfoMap tabInfo
let fim = _tiFieldInfoMap tabInfo
case ru of
RUFKeyOn cn -> assertPGCol fim "" cn
RUManual (ObjRelManualConfig rm) -> validateManualConfig fim rm
@ -168,11 +168,11 @@ validateArrRel
validateArrRel qt (RelDef rn ru _) = do
tabInfo <- askTabInfo qt
checkForFldConfilct tabInfo (fromRel rn)
let fim = tiFieldInfoMap tabInfo
let fim = _tiFieldInfoMap tabInfo
case ru of
RUFKeyOn (ArrRelUsingFKeyOn remoteQt rcn) -> do
remoteTabInfo <- askTabInfo remoteQt
let rfim = tiFieldInfoMap remoteTabInfo
let rfim = _tiFieldInfoMap remoteTabInfo
-- Check if 'using' column exists
assertPGCol rfim "" rcn
RUManual (ArrRelManualConfig rm) ->
@ -229,7 +229,7 @@ dropRelP1 :: (UserInfoM m, QErrM m, CacheRM m) => DropRel -> m [SchemaObjId]
dropRelP1 (DropRel qt rn cascade) = do
adminOnly
tabInfo <- askTabInfo qt
_ <- askRelType (tiFieldInfoMap tabInfo) rn ""
_ <- askRelType (_tiFieldInfoMap tabInfo) rn ""
sc <- askSchemaCache
let depObjs = getDependentObjs sc relObjId
when (depObjs /= [] && not (or cascade)) $ reportDeps depObjs
@ -279,7 +279,7 @@ validateRelP1
validateRelP1 qt rn = do
adminOnly
tabInfo <- askTabInfo qt
askRelType (tiFieldInfoMap tabInfo) rn ""
askRelType (_tiFieldInfoMap tabInfo) rn ""
setRelCommentP2
:: (QErrM m, MonadTx m)

View File

@ -6,9 +6,9 @@ import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Relationship (validateRelP1)
import Hasura.RQL.DDL.Relationship.Types
import Hasura.RQL.DDL.Schema.Rename (renameRelInCatalog)
import Hasura.RQL.DDL.Schema.Table (buildSchemaCache,
checkNewInconsistentMeta)
import Hasura.RQL.DDL.Schema (buildSchemaCache,
renameRelInCatalog,
withNewInconsistentObjsCheck)
import Hasura.RQL.Types
import Hasura.SQL.Types
@ -23,11 +23,10 @@ renameRelP2
, HasSQLGenCtx m
)
=> QualifiedTable -> RelName -> RelInfo -> m ()
renameRelP2 qt newRN relInfo = do
oldSC <- askSchemaCache
renameRelP2 qt newRN relInfo = withNewInconsistentObjsCheck $ do
tabInfo <- askTabInfo qt
-- check for conflicts in fieldInfoMap
case HM.lookup (fromRel newRN) $ tiFieldInfoMap tabInfo of
case HM.lookup (fromRel newRN) $ _tiFieldInfoMap tabInfo of
Nothing -> return ()
Just _ ->
throw400 AlreadyExists $ "cannot rename relationship " <> oldRN
@ -37,9 +36,6 @@ renameRelP2 qt newRN relInfo = do
renameRelInCatalog qt oldRN newRN
-- update schema cache
buildSchemaCache
newSC <- askSchemaCache
-- check for new inconsistency
checkNewInconsistentMeta oldSC newSC
where
oldRN = riName relInfo

View File

@ -0,0 +1,117 @@
{-| This module (along with the various @Hasura.RQL.DDL.Schema.*@ modules) provides operations to
load and modify the Hasura catalog and schema cache.
* The /catalog/ refers to the set of PostgreSQL tables and views that store all schema information
known by Hasura. This includes any tracked Postgres tables, views, and functions, all remote
schemas, and any additional Hasura-specific information such as permissions and relationships.
Primitive functions for loading and modifying the catalog are defined in
"Hasura.RQL.DDL.Schema.Catalog", but most uses are wrapped by other functions to synchronize
catalog information with the information in the schema cache.
* The /schema cache/ is a process-global value of type 'SchemaCache' that stores an in-memory
representation of the data stored in the catalog. The in-memory representation is not identical
to the data in the catalog, since it has some post-processing applied to it in order to make it
easier to consume for other parts of the system, such as GraphQL schema generation. For example,
although column information is represented by 'PGRawColumnInfo', the schema cache contains
processed 'PGColumnInfo' values, instead.
Ultimately, the catalog is the source of truth for all information contained in the schema
cache, but to avoid rebuilding the entire schema cache on every change to the catalog, various
functions incrementally update the cache when they modify the catalog.
-}
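-- A minimal usage sketch (hypothetical helper, for illustration only): DDL handlers
-- generally write to the catalog first and then rebuild the relevant part of the schema
-- cache, so the in-memory representation stays in sync with the catalog, e.g.
--
-- > trackEnumTableSketch :: (CacheBuildM m) => QualifiedTable -> m ()
-- > trackEnumTableSketch table = do
-- >   saveTableToCatalog table True         -- record the table (as an enum) in the catalog
-- >   buildSchemaCacheFor (MOTable table)   -- rebuild the cache entry from the catalog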
module Hasura.RQL.DDL.Schema
( module Hasura.RQL.DDL.Schema.Cache
, module Hasura.RQL.DDL.Schema.Catalog
, module Hasura.RQL.DDL.Schema.Function
, module Hasura.RQL.DDL.Schema.Rename
, module Hasura.RQL.DDL.Schema.Table
, RunSQL(..)
, runRunSQL
) where
import Hasura.Prelude
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Database.PG.Query as Q
import qualified Database.PostgreSQL.LibPQ as PQ
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import Hasura.EncJSON
import Hasura.RQL.DDL.Schema.Cache
import Hasura.RQL.DDL.Schema.Catalog
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Schema.Rename
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.Instances ()
import Hasura.RQL.Types
import Hasura.Server.Utils (matchRegex)
data RunSQL
= RunSQL
{ rSql :: Text
, rCascade :: !(Maybe Bool)
, rCheckMetadataConsistency :: !(Maybe Bool)
} deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 1 snakeCase){omitNothingFields=True} ''RunSQL)
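-- For reference, the derived instance above accepts a payload shaped like this
-- (illustrative SQL string):
--
-- > { "sql": "ALTER TABLE article ADD COLUMN published_on date",
-- >   "cascade": false,
-- >   "check_metadata_consistency": true }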
runRunSQL
:: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> RunSQL -> m EncJSON
runRunSQL (RunSQL t cascade mChkMDCnstcy) = do
adminOnly
isMDChkNeeded <- maybe (isAltrDropReplace t) return mChkMDCnstcy
bool (execRawSQL t) (withMetadataCheck (or cascade) $ execRawSQL t) isMDChkNeeded
where
execRawSQL :: (MonadTx m) => Text -> m EncJSON
execRawSQL =
fmap (encJFromJValue @RunSQLRes) .
liftTx . Q.multiQE rawSqlErrHandler . Q.fromText
where
rawSqlErrHandler txe =
let e = err400 PostgresError "query execution failed"
in e {qeInternal = Just $ toJSON txe}
isAltrDropReplace :: QErrM m => T.Text -> m Bool
isAltrDropReplace = either throwErr return . matchRegex regex False
where
throwErr s = throw500 $ "compiling regex failed: " <> T.pack s
regex = "alter|drop|replace|create function"
data RunSQLRes
= RunSQLRes
{ rrResultType :: !Text
, rrResult :: !Value
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 2 snakeCase) ''RunSQLRes)
instance Q.FromRes RunSQLRes where
fromRes (Q.ResultOkEmpty _) =
return $ RunSQLRes "CommandOk" Null
fromRes (Q.ResultOkData res) = do
csvRows <- resToCSV res
return $ RunSQLRes "TuplesOk" $ toJSON csvRows
where
resToCSV :: PQ.Result -> ExceptT T.Text IO [[Text]]
resToCSV r = do
nr <- liftIO $ PQ.ntuples r
nc <- liftIO $ PQ.nfields r
hdr <- forM [0..pred nc] $ \ic -> do
colNameBS <- liftIO $ PQ.fname r ic
maybe (return "unknown") decodeBS colNameBS
rows <- forM [0..pred nr] $ \ir ->
forM [0..pred nc] $ \ic -> do
cellValBS <- liftIO $ PQ.getvalue r ir ic
maybe (return "NULL") decodeBS cellValBS
return $ hdr:rows
decodeBS = either (throwError . T.pack . show) return . TE.decodeUtf8'

View File

@ -0,0 +1,353 @@
{-| Top-level functions concerned specifically with operations on the schema cache, such as
rebuilding it from the catalog and incorporating schema changes. See the module documentation for
"Hasura.RQL.DDL.Schema" for more details.
__Note__: this module is __mutually recursive__ with other @Hasura.RQL.DDL.Schema.*@ modules, which
both define pieces of the implementation of building the schema cache and define handlers that
trigger schema cache rebuilds. -}
module Hasura.RQL.DDL.Schema.Cache
( CacheBuildM
, buildSchemaCache
, buildSchemaCacheFor
, buildSchemaCacheStrict
, buildSchemaCacheWithoutSetup
, withNewInconsistentObjsCheck
, withMetadataCheck
, purgeDependentObject
, withSchemaObject
, withSchemaObject_
) where
import Hasura.Prelude
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import Data.Aeson
import qualified Hasura.GraphQL.Schema as GS
import Hasura.Db
import Hasura.GraphQL.RemoteServer
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.EventTrigger
import Hasura.RQL.DDL.Permission
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteSchema
import Hasura.RQL.DDL.Schema.Catalog
import Hasura.RQL.DDL.Schema.Diff
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DDL.Utils
import Hasura.RQL.Types
import Hasura.RQL.Types.Catalog
import Hasura.RQL.Types.QueryCollection
import Hasura.SQL.Types
type CacheBuildM m = (CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
buildSchemaCache :: (CacheBuildM m) => m ()
buildSchemaCache = buildSchemaCacheWithOptions True
buildSchemaCacheWithoutSetup :: (CacheBuildM m) => m ()
buildSchemaCacheWithoutSetup = buildSchemaCacheWithOptions False
buildSchemaCacheWithOptions :: (CacheBuildM m) => Bool -> m ()
buildSchemaCacheWithOptions withSetup = do
-- clean hdb_views
when withSetup $ liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
-- reset the current schemacache
writeSchemaCache emptySchemaCache
sqlGenCtx <- askSQLGenCtx
-- fetch all catalog metadata
CatalogMetadata tables relationships permissions
eventTriggers remoteSchemas functions fkeys' allowlistDefs
<- liftTx fetchCatalogData
let fkeys = HS.fromList fkeys'
-- tables
modTableCache =<< buildTableCache tables
-- relationships
forM_ relationships $ \(CatalogRelation qt rn rt rDef cmnt) -> do
let objId = MOTableObj qt $ MTORel rn rt
def = toJSON $ WithTable qt $ RelDef rn rDef cmnt
mkInconsObj = InconsistentMetadataObj objId (MOTRel rt) def
modifyErr (\e -> "table " <> qt <<> "; rel " <> rn <<> "; " <> e) $
withSchemaObject_ mkInconsObj $
case rt of
ObjRel -> do
using <- decodeValue rDef
let relDef = RelDef rn using Nothing
validateObjRel qt relDef
objRelP2Setup qt fkeys relDef
ArrRel -> do
using <- decodeValue rDef
let relDef = RelDef rn using Nothing
validateArrRel qt relDef
arrRelP2Setup qt fkeys relDef
-- permissions
forM_ permissions $ \(CatalogPermission qt rn pt pDef cmnt) -> do
let objId = MOTableObj qt $ MTOPerm rn pt
def = toJSON $ WithTable qt $ PermDef rn pDef cmnt
mkInconsObj = InconsistentMetadataObj objId (MOTPerm pt) def
modifyErr (\e -> "table " <> qt <<> "; role " <> rn <<> "; " <> e) $
withSchemaObject_ mkInconsObj $
case pt of
PTInsert -> permHelper withSetup sqlGenCtx qt rn pDef PAInsert
PTSelect -> permHelper withSetup sqlGenCtx qt rn pDef PASelect
PTUpdate -> permHelper withSetup sqlGenCtx qt rn pDef PAUpdate
PTDelete -> permHelper withSetup sqlGenCtx qt rn pDef PADelete
-- event triggers
forM_ eventTriggers $ \(CatalogEventTrigger qt trn configuration) -> do
let objId = MOTableObj qt $ MTOTrigger trn
def = object ["table" .= qt, "configuration" .= configuration]
mkInconsObj = InconsistentMetadataObj objId MOTEventTrigger def
withSchemaObject_ mkInconsObj $ do
etc <- decodeValue configuration
subTableP2Setup qt etc
allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt
when withSetup $ liftTx $
mkAllTriggersQ trn qt allCols (stringifyNum sqlGenCtx) (etcDefinition etc)
-- sql functions
forM_ functions $ \(CatalogFunction qf rawfiM) -> do
let def = toJSON $ TrackFunction qf
mkInconsObj =
InconsistentMetadataObj (MOFunction qf) MOTFunction def
modifyErr (\e -> "function " <> qf <<> "; " <> e) $
withSchemaObject_ mkInconsObj $ do
rawfi <- onNothing rawfiM $
throw400 NotExists $ "no such function exists in postgres : " <>> qf
trackFunctionP2Setup qf rawfi
-- allow list
replaceAllowlist $ concatMap _cdQueries allowlistDefs
-- build GraphQL context with tables and functions
GS.buildGCtxMapPG
-- remote schemas
forM_ remoteSchemas resolveSingleRemoteSchema
where
permHelper setup sqlGenCtx qt rn pDef pa = do
qCtx <- mkAdminQCtx sqlGenCtx <$> askSchemaCache
perm <- decodeValue pDef
let permDef = PermDef rn perm Nothing
createPerm = WithTable qt permDef
(permInfo, deps) <- liftP1WithQCtx qCtx $ createPermP1 createPerm
when setup $ addPermP2Setup qt permDef permInfo
addPermToCache qt rn pa permInfo deps
-- p2F qt rn p1Res
resolveSingleRemoteSchema rs = do
let AddRemoteSchemaQuery name _ _ = rs
mkInconsObj = InconsistentMetadataObj (MORemoteSchema name)
MOTRemoteSchema (toJSON rs)
withSchemaObject_ mkInconsObj $ do
rsCtx <- addRemoteSchemaP2Setup rs
sc <- askSchemaCache
let gCtxMap = scGCtxMap sc
defGCtx = scDefaultRemoteGCtx sc
rGCtx = convRemoteGCtx $ rscGCtx rsCtx
mergedGCtxMap <- mergeRemoteSchema gCtxMap rGCtx
mergedDefGCtx <- mergeGCtx defGCtx rGCtx
writeSchemaCache sc { scGCtxMap = mergedGCtxMap
, scDefaultRemoteGCtx = mergedDefGCtx
}
-- | Rebuilds the schema cache. If an object with the given object id became newly inconsistent,
-- raises an error about it specifically. Otherwise, raises a generic metadata inconsistency error.
buildSchemaCacheFor :: (CacheBuildM m) => MetadataObjId -> m ()
buildSchemaCacheFor objectId = do
oldSchemaCache <- askSchemaCache
buildSchemaCache
newSchemaCache <- askSchemaCache
let diffInconsistentObjects = getDifference _moId `on` scInconsistentObjs
newInconsistentObjects = newSchemaCache `diffInconsistentObjects` oldSchemaCache
for_ (find ((== objectId) . _moId) newInconsistentObjects) $ \matchingObject ->
throw400 ConstraintViolation (_moReason matchingObject)
unless (null newInconsistentObjects) $
throwError (err400 Unexpected "cannot continue due to new inconsistent metadata")
{ qeInternal = Just $ toJSON newInconsistentObjects }
-- | Like 'buildSchemaCache', but fails if there is any inconsistent metadata.
buildSchemaCacheStrict :: (CacheBuildM m) => m ()
buildSchemaCacheStrict = do
buildSchemaCache
sc <- askSchemaCache
let inconsObjs = scInconsistentObjs sc
unless (null inconsObjs) $ do
let err = err400 Unexpected "cannot continue due to inconsistent metadata"
throwError err{qeInternal = Just $ toJSON inconsObjs}
-- | Executes the given action, and if any new 'InconsistentMetadataObj's are added to the schema
-- cache as a result of its execution, raises an error.
withNewInconsistentObjsCheck :: (QErrM m, CacheRM m) => m a -> m a
withNewInconsistentObjsCheck action = do
originalObjects <- scInconsistentObjs <$> askSchemaCache
result <- action
currentObjects <- scInconsistentObjs <$> askSchemaCache
checkNewInconsistentMeta originalObjects currentObjects
pure result
-- | @'withMetadataCheck' cascade action@ runs @action@ and checks if the schema changed as a
-- result. If it did, it checks to ensure the changes do not violate any integrity constraints, and
-- if not, incorporates them into the schema cache.
withMetadataCheck :: (CacheBuildM m) => Bool -> m a -> m a
withMetadataCheck cascade action = do
-- Drop hdb_views so no interference is caused to the sql query
liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
-- Get the metadata before the sql query; fetch everything, then filter it down below
oldMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
oldFuncMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta
-- Run the action
res <- action
-- Get the metadata after the sql query
newMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
newFuncMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta
sc <- askSchemaCache
let existingInconsistentObjs = scInconsistentObjs sc
existingTables = M.keys $ scTables sc
oldMeta = flip filter oldMetaU $ \tm -> tmTable tm `elem` existingTables
schemaDiff = getSchemaDiff oldMeta newMeta
existingFuncs = M.keys $ scFunctions sc
oldFuncMeta = flip filter oldFuncMetaU $ \fm -> funcFromMeta fm `elem` existingFuncs
FunctionDiff droppedFuncs alteredFuncs = getFuncDiff oldFuncMeta newFuncMeta
overloadedFuncs = getOverloadedFuncs existingFuncs newFuncMeta
-- Do not allow overloading functions
unless (null overloadedFuncs) $
throw400 NotSupported $ "the following tracked function(s) cannot be overloaded: "
<> reportFuncs overloadedFuncs
indirectDeps <- getSchemaChangeDeps schemaDiff
-- Report back with an error if cascade is not set
when (indirectDeps /= [] && not cascade) $ reportDepsExt indirectDeps []
-- Purge all the indirect dependents from state
mapM_ purgeDependentObject indirectDeps
-- Purge all dropped functions
let purgedFuncs = flip mapMaybe indirectDeps $ \dep ->
case dep of
SOFunction qf -> Just qf
_ -> Nothing
forM_ (droppedFuncs \\ purgedFuncs) $ \qf -> do
liftTx $ delFunctionFromCatalog qf
delFunctionFromCache qf
-- Process altered functions
forM_ alteredFuncs $ \(qf, newTy) ->
when (newTy == FTVOLATILE) $
throw400 NotSupported $
"type of function " <> qf <<> " is altered to \"VOLATILE\" which is not supported now"
-- update the schema cache and hdb_catalog with the changes
reloadRequired <- processSchemaChanges schemaDiff
let withReload = do -- in case of any rename
buildSchemaCache
currentInconsistentObjs <- scInconsistentObjs <$> askSchemaCache
checkNewInconsistentMeta existingInconsistentObjs currentInconsistentObjs
withoutReload = do
postSc <- askSchemaCache
-- recreate the insert permission infra
forM_ (M.elems $ scTables postSc) $ \ti -> do
let tn = _tiName ti
forM_ (M.elems $ _tiRolePermInfoMap ti) $ \rpi ->
maybe (return ()) (liftTx . buildInsInfra tn) $ _permIns rpi
strfyNum <- stringifyNum <$> askSQLGenCtx
--recreate triggers
forM_ (M.elems $ scTables postSc) $ \ti -> do
let tn = _tiName ti
cols = getCols $ _tiFieldInfoMap ti
forM_ (M.toList $ _tiEventTriggerInfoMap ti) $ \(trn, eti) -> do
let fullspec = etiOpsDef eti
liftTx $ mkAllTriggersQ trn tn cols strfyNum fullspec
bool withoutReload withReload reloadRequired
return res
where
reportFuncs = T.intercalate ", " . map dquoteTxt
processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool
processSchemaChanges schemaDiff = do
-- Purge the dropped tables
mapM_ delTableAndDirectDeps droppedTables
sc <- askSchemaCache
fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do
ti <- case M.lookup oldQtn $ scTables sc of
Just ti -> return ti
Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
processTableChanges ti tableDiff
where
SchemaDiff droppedTables alteredTables = schemaDiff
checkNewInconsistentMeta
:: (QErrM m)
=> [InconsistentMetadataObj] -> [InconsistentMetadataObj] -> m ()
checkNewInconsistentMeta originalInconsMeta currentInconsMeta =
unless (null newInconsMetaObjects) $
throwError (err500 Unexpected "cannot continue due to newly found inconsistent metadata")
{ qeInternal = Just $ toJSON newInconsMetaObjects }
where
newInconsMetaObjects = getDifference _moId currentInconsMeta originalInconsMeta
purgeDependentObject :: (CacheRWM m, MonadTx m) => SchemaObjId -> m ()
purgeDependentObject schemaObjId = case schemaObjId of
(SOTableObj tn (TOPerm rn pt)) -> do
liftTx $ dropPermFromCatalog tn rn pt
withPermType pt delPermFromCache rn tn
(SOTableObj qt (TORel rn)) -> do
liftTx $ delRelFromCatalog qt rn
delRelFromCache rn qt
(SOFunction qf) -> do
liftTx $ delFunctionFromCatalog qf
delFunctionFromCache qf
(SOTableObj qt (TOTrigger trn)) -> do
liftTx $ delEventTriggerFromCatalog trn
delEventTriggerFromCache qt trn
_ -> throw500 $
"unexpected dependent object : " <> reportSchemaObj schemaObjId
-- | @'withSchemaObject' f action@ runs @action@, and if it raises any errors, applies @f@ to the
-- error message to produce an 'InconsistentMetadataObj', then adds the object to the schema cache
-- and returns 'Nothing' instead of aborting.
withSchemaObject :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m a -> m (Maybe a)
withSchemaObject f action =
(Just <$> action) `catchError` \err -> do
sc <- askSchemaCache
let inconsObj = f $ qeError err
allInconsObjs = inconsObj:scInconsistentObjs sc
writeSchemaCache sc { scInconsistentObjs = allInconsObjs }
pure Nothing
withSchemaObject_ :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m () -> m ()
withSchemaObject_ f = void . withSchemaObject f

View File

@ -0,0 +1,20 @@
module Hasura.RQL.DDL.Schema.Cache where
import Hasura.Prelude
import Hasura.Db
import Hasura.RQL.Types
type CacheBuildM m = (CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
buildSchemaCacheStrict :: (CacheBuildM m) => m ()
buildSchemaCacheFor :: (CacheBuildM m) => MetadataObjId -> m ()
buildSchemaCache :: (CacheBuildM m) => m ()
buildSchemaCacheWithoutSetup :: (CacheBuildM m) => m ()
withNewInconsistentObjsCheck :: (QErrM m, CacheRM m) => m a -> m a
withMetadataCheck :: (CacheBuildM m) => Bool -> m a -> m a
purgeDependentObject :: (CacheRWM m, MonadTx m) => SchemaObjId -> m ()
withSchemaObject :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m a -> m (Maybe a)
withSchemaObject_ :: (QErrM m, CacheRWM m) => (Text -> InconsistentMetadataObj) -> m () -> m ()

View File

@ -0,0 +1,39 @@
-- | Functions for loading and modifying the catalog. See the module documentation for
-- "Hasura.RQL.DDL.Schema" for more details.
module Hasura.RQL.DDL.Schema.Catalog
( fetchCatalogData
, saveTableToCatalog
, updateTableIsEnumInCatalog
, deleteTableFromCatalog
) where
import Hasura.Prelude
import qualified Database.PG.Query as Q
import Hasura.Db
import Hasura.RQL.Types.Catalog
import Hasura.SQL.Types
fetchCatalogData :: (MonadTx m) => m CatalogMetadata
fetchCatalogData = liftTx $ Q.getAltJ . runIdentity . Q.getRow <$> Q.withQE defaultTxErrorHandler
$(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True
saveTableToCatalog :: (MonadTx m) => QualifiedTable -> Bool -> m ()
saveTableToCatalog (QualifiedObject sn tn) isEnum = liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql|
INSERT INTO "hdb_catalog"."hdb_table" (table_schema, table_name, is_enum)
VALUES ($1, $2, $3)
|] (sn, tn, isEnum) False
updateTableIsEnumInCatalog :: (MonadTx m) => QualifiedTable -> Bool -> m ()
updateTableIsEnumInCatalog (QualifiedObject sn tn) isEnum =
liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql|
UPDATE "hdb_catalog"."hdb_table" SET is_enum = $3
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn, isEnum) False
deleteTableFromCatalog :: (MonadTx m) => QualifiedTable -> m ()
deleteTableFromCatalog (QualifiedObject sn tn) = liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql|
DELETE FROM "hdb_catalog"."hdb_table"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False

View File

@ -40,8 +40,9 @@ data PGColMeta
= PGColMeta
{ pcmColumnName :: !PGCol
, pcmOrdinalPosition :: !Int
, pcmDataType :: !PGColType
, pcmDataType :: !PGScalarType
, pcmIsNullable :: !Bool
, pcmReferences :: ![QualifiedTable]
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''PGColMeta)
@ -87,8 +88,8 @@ data TableDiff
= TableDiff
{ _tdNewName :: !(Maybe QualifiedTable)
, _tdDroppedCols :: ![PGCol]
, _tdAddedCols :: ![PGColInfo]
, _tdAlteredCols :: ![(PGColInfo, PGColInfo)]
, _tdAddedCols :: ![PGRawColumnInfo]
, _tdAlteredCols :: ![(PGRawColumnInfo, PGRawColumnInfo)]
, _tdDroppedFKeyCons :: ![ConstraintName]
-- The final list of uniq/primary constraint names
-- used for generating types on_conflict clauses
@ -116,8 +117,8 @@ getTableDiff oldtm newtm =
existingCols = getOverlap pcmOrdinalPosition oldCols newCols
pcmToPci (PGColMeta colName _ colType isNullable)
= PGColInfo colName colType isNullable
pcmToPci (PGColMeta colName _ colType isNullable references)
= PGRawColumnInfo colName colType isNullable references
alteredCols =
flip map (filter (uncurry (/=)) existingCols) $ pcmToPci *** pcmToPci
@ -137,7 +138,7 @@ getTableDiff oldtm newtm =
getTableChangeDeps
:: (QErrM m, CacheRWM m)
=> TableInfo -> TableDiff -> m [SchemaObjId]
=> TableInfo PGColumnInfo -> TableDiff -> m [SchemaObjId]
getTableChangeDeps ti tableDiff = do
sc <- askSchemaCache
-- for all the dropped columns
@ -150,7 +151,7 @@ getTableChangeDeps ti tableDiff = do
return $ getDependentObjs sc objId
return $ droppedConsDeps <> droppedColDeps
where
tn = tiName ti
tn = _tiName ti
TableDiff _ droppedCols _ _ droppedFKeyConstraints _ = tableDiff
data SchemaDiff

View File

@ -0,0 +1,135 @@
-- | Types and functions for interacting with and manipulating SQL enums represented by
-- /single-column tables/, __not__ native Postgres enum types. Native enum types in Postgres are
-- difficult to change, so we discourage their use, but we might add support for native enum types
-- in the future.
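-- A hypothetical example of such a table (names and values are illustrative): a single
-- text primary key column whose rows become the GraphQL enum values, plus an optional
-- second text column whose contents are used as the value descriptions:
--
-- > CREATE TABLE article_status
-- >   ( value   text PRIMARY KEY
-- >   , comment text );
-- > INSERT INTO article_status (value, comment) VALUES
-- >   ('draft', 'Not yet published'), ('published', NULL), ('archived', 'No longer listed');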
module Hasura.RQL.DDL.Schema.Enum (
-- * Re-exports from "Hasura.RQL.Types.Column"
EnumReference(..)
, EnumValues
, EnumValueInfo(..)
, EnumValue(..)
-- * Loading enum values
, fetchAndValidateEnumValues
) where
import Hasura.Prelude
import Control.Monad.Validate
import Data.List (delete)
import qualified Data.HashMap.Strict as M
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.Db
import Hasura.GraphQL.Utils
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Error
import Hasura.SQL.Types
import qualified Hasura.SQL.DML as S
data EnumTableIntegrityError
= EnumTableMissingPrimaryKey
| EnumTableMultiColumnPrimaryKey ![PGCol]
| EnumTableNonTextualPrimaryKey !PGRawColumnInfo
| EnumTableNoEnumValues
| EnumTableInvalidEnumValueNames !(NE.NonEmpty T.Text)
| EnumTableNonTextualCommentColumn !PGRawColumnInfo
| EnumTableTooManyColumns ![PGCol]
deriving (Show, Eq)
fetchAndValidateEnumValues
:: (MonadTx m)
=> QualifiedTable
-> [PGRawColumnInfo]
-> [PGRawColumnInfo]
-> m EnumValues
fetchAndValidateEnumValues tableName primaryKeyColumns columnInfos =
either (throw400 ConstraintViolation . showErrors) pure =<< runValidateT fetchAndValidate
where
fetchAndValidate :: (MonadTx m, MonadValidate [EnumTableIntegrityError] m) => m EnumValues
fetchAndValidate = do
maybePrimaryKey <- tolerate validatePrimaryKey
maybeCommentColumn <- validateColumns maybePrimaryKey
enumValues <- maybe (refute mempty) (fetchEnumValues maybeCommentColumn) maybePrimaryKey
validateEnumValues enumValues
pure enumValues
where
validatePrimaryKey = case primaryKeyColumns of
[] -> refute [EnumTableMissingPrimaryKey]
[column] -> case prciType column of
PGText -> pure column
_ -> refute [EnumTableNonTextualPrimaryKey column]
_ -> refute [EnumTableMultiColumnPrimaryKey $ map prciName primaryKeyColumns]
validateColumns primaryKeyColumn = do
let nonPrimaryKeyColumns = maybe columnInfos (`delete` columnInfos) primaryKeyColumn
case nonPrimaryKeyColumns of
[] -> pure Nothing
[column] -> case prciType column of
PGText -> pure $ Just column
_ -> dispute [EnumTableNonTextualCommentColumn column] $> Nothing
columns -> dispute [EnumTableTooManyColumns $ map prciName columns] $> Nothing
fetchEnumValues maybeCommentColumn primaryKeyColumn = do
let nullExtr = S.Extractor S.SENull Nothing
commentExtr = maybe nullExtr (S.mkExtr . prciName) maybeCommentColumn
query = Q.fromBuilder $ toSQL S.mkSelect
{ S.selFrom = Just $ S.mkSimpleFromExp tableName
, S.selExtr = [S.mkExtr (prciName primaryKeyColumn), commentExtr] }
fmap mkEnumValues . liftTx $ Q.withQE defaultTxErrorHandler query () True
mkEnumValues rows = M.fromList . flip map rows $ \(key, comment) ->
(EnumValue key, EnumValueInfo comment)
validateEnumValues enumValues = do
let enumValueNames = map (G.Name . getEnumValue) (M.keys enumValues)
when (null enumValueNames) $
refute [EnumTableNoEnumValues]
let badNames = map G.unName $ filter (not . isValidEnumName) enumValueNames
for_ (NE.nonEmpty badNames) $ \someBadNames ->
refute [EnumTableInvalidEnumValueNames someBadNames]
-- https://graphql.github.io/graphql-spec/June2018/#EnumValue
isValidEnumName name =
isValidName name && name `notElem` ["true", "false", "null"]
showErrors :: [EnumTableIntegrityError] -> T.Text
showErrors allErrors =
"the table " <> tableName <<> " cannot be used as an enum " <> reasonsMessage
where
reasonsMessage = case allErrors of
[singleError] -> "because " <> showOne singleError
_ -> "for the following reasons:\n" <> T.unlines
(map (("" <>) . showOne) allErrors)
showOne :: EnumTableIntegrityError -> T.Text
showOne = \case
EnumTableMissingPrimaryKey -> "the table must have a primary key"
EnumTableMultiColumnPrimaryKey cols ->
"the tables primary key must not span multiple columns ("
<> T.intercalate ", " (map dquoteTxt $ sort cols) <> ")"
EnumTableNonTextualPrimaryKey colInfo -> typeMismatch "primary key" colInfo PGText
EnumTableNoEnumValues -> "the table must have at least one row"
EnumTableInvalidEnumValueNames values ->
let pluralString = " are not valid GraphQL enum value names"
valuesString = case NE.reverse (NE.sort values) of
value NE.:| [] -> "value " <> value <<> " is not a valid GraphQL enum value name"
value2 NE.:| [value1] -> "values " <> value1 <<> " and " <> value2 <<> pluralString
lastValue NE.:| otherValues ->
"values " <> T.intercalate ", " (map dquoteTxt $ reverse otherValues) <> ", and "
<> lastValue <<> pluralString
in "the " <> valuesString
EnumTableNonTextualCommentColumn colInfo -> typeMismatch "comment column" colInfo PGText
EnumTableTooManyColumns cols ->
"the table must have exactly one primary key and optionally one comment column, not "
<> T.pack (show $ length cols) <> " columns ("
<> T.intercalate ", " (map dquoteTxt $ sort cols) <> ")"
where
typeMismatch description colInfo expected =
"the tables " <> description <> " (" <> prciName colInfo <<> ") must have type "
<> expected <<> ", not type " <>> prciType colInfo

View File

@ -42,13 +42,13 @@ data RawFuncInfo
, rfiReturnTypeName :: !T.Text
, rfiReturnTypeType :: !PGTypType
, rfiReturnsSet :: !Bool
, rfiInputArgTypes :: ![PGColType]
, rfiInputArgTypes :: ![PGScalarType]
, rfiInputArgNames :: ![T.Text]
, rfiReturnsTable :: !Bool
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 3 snakeCase) ''RawFuncInfo)
mkFunctionArgs :: [PGColType] -> [T.Text] -> [FunctionArg]
mkFunctionArgs :: [PGScalarType] -> [T.Text] -> [FunctionArg]
mkFunctionArgs tys argNames =
bool withNames withNoNames $ null argNames
where

View File

@ -1,3 +1,6 @@
-- | Functions for mutating the catalog (with integrity checking) to incorporate schema changes
-- discovered after applying a user-supplied SQL query. None of these functions modify the schema
-- cache, so it must be reloaded after the catalog is updated.
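-- A minimal sketch of the expected calling pattern (illustrative; compare renameRelP2 above):
--
-- > renameTableInCatalog newName oldName   -- update the catalog only
-- > buildSchemaCache                       -- then rebuild the schema cache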
module Hasura.RQL.DDL.Schema.Rename
( renameTableInCatalog
, renameColInCatalog
@ -70,7 +73,7 @@ renameTableInCatalog newQT oldQT = do
renameColInCatalog
:: (MonadTx m, CacheRM m)
=> PGCol -> PGCol -> QualifiedTable -> TableInfo -> m ()
=> PGCol -> PGCol -> QualifiedTable -> TableInfo PGColumnInfo -> m ()
renameColInCatalog oCol nCol qt ti = do
sc <- askSchemaCache
-- Check if any relation exists with new column name
@ -90,7 +93,7 @@ renameColInCatalog oCol nCol qt ti = do
where
errMsg = "cannot rename column " <> oCol <<> " to " <>> nCol
assertFldNotExists =
case M.lookup (fromPGCol oCol) $ tiFieldInfoMap ti of
case M.lookup (fromPGCol oCol) $ _tiFieldInfoMap ti of
Just (FIRelationship _) ->
throw400 AlreadyExists $ "cannot rename column " <> oCol
<<> " to " <> nCol <<> " in table " <> qt <<>

View File

@ -1,70 +1,86 @@
{- |
Description: Create/delete SQL tables to/from Hasura metadata.
-}
-- | Description: Create/delete SQL tables to/from Hasura metadata.
module Hasura.RQL.DDL.Schema.Table
( TrackTable(..)
, runTrackTableQ
, trackExistingTableOrViewP2
{-# LANGUAGE TypeApplications #-}
, UntrackTable(..)
, runUntrackTableQ
module Hasura.RQL.DDL.Schema.Table where
, SetTableIsEnum(..)
, runSetExistingTableIsEnumQ
, buildTableCache
, delTableAndDirectDeps
, processTableChanges
) where
import Hasura.EncJSON
import Hasura.GraphQL.RemoteServer
import Hasura.Prelude
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.EventTrigger
import Hasura.RQL.DDL.Permission
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteSchema
import {-# SOURCE #-} Hasura.RQL.DDL.Schema.Cache
import Hasura.RQL.DDL.Schema.Catalog
import Hasura.RQL.DDL.Schema.Diff
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Schema.Enum
import Hasura.RQL.DDL.Schema.Rename
import Hasura.RQL.DDL.Utils
import Hasura.RQL.Types
import Hasura.RQL.Types.Catalog
import Hasura.RQL.Types.QueryCollection
import Hasura.Server.Utils (matchRegex)
import Hasura.SQL.Types
import qualified Database.PG.Query as Q
import qualified Hasura.GraphQL.Schema as GS
import qualified Database.PG.Query as Q
import qualified Hasura.GraphQL.Schema as GS
import Control.Lens.Extended hiding ((.=))
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import Network.URI.Extended ()
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import Network.URI.Extended ()
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Database.PostgreSQL.LibPQ as PQ
import qualified Data.HashMap.Strict as M
import qualified Data.Text as T
delTableFromCatalog :: QualifiedTable -> Q.Tx ()
delTableFromCatalog (QualifiedObject sn tn) =
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_table"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
saveTableToCatalog :: QualifiedTable -> Q.Tx ()
saveTableToCatalog (QualifiedObject sn tn) =
Q.unitQ [Q.sql|
INSERT INTO "hdb_catalog"."hdb_table" VALUES ($1, $2)
|] (sn, tn) False
newtype TrackTable
data TrackTable
= TrackTable
{ tName :: QualifiedTable }
deriving (Show, Eq, FromJSON, ToJSON, Lift)
{ tName :: !QualifiedTable
, tIsEnum :: !Bool
} deriving (Show, Eq, Lift)
instance FromJSON TrackTable where
parseJSON v = withOptions <|> withoutOptions
where
withOptions = flip (withObject "TrackTable") v $ \o -> TrackTable
<$> o .: "table"
<*> o .:? "is_enum" .!= False
withoutOptions = TrackTable <$> parseJSON v <*> pure False
instance ToJSON TrackTable where
toJSON (TrackTable name isEnum)
| isEnum = object [ "table" .= name, "is_enum" .= isEnum ]
| otherwise = toJSON name
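-- For reference, the instances above keep the original payload shape working while also
-- accepting the new one (illustrative "args" values):
--
-- >  {"schema": "public", "name": "article"}
-- >  {"table": {"schema": "public", "name": "article_status"}, "is_enum": true}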
data SetTableIsEnum
= SetTableIsEnum
{ stieTable :: !QualifiedTable
, stieIsEnum :: !Bool
} deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 4 snakeCase) ''SetTableIsEnum)
data UntrackTable =
UntrackTable
{ utTable :: !QualifiedTable
, utCascade :: !(Maybe Bool)
} deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable)
-- | Track table/view, Phase 1:
-- Validate table tracking operation. Fails if table is already being tracked,
-- or if a function with the same name is being tracked.
trackExistingTableOrViewP1
:: (CacheRM m, UserInfoM m, QErrM m) => TrackTable -> m ()
trackExistingTableOrViewP1 (TrackTable vn) = do
trackExistingTableOrViewP1 TrackTable { tName = vn } = do
adminOnly
rawSchemaCache <- askSchemaCache
when (M.member vn $ scTables rawSchemaCache) $
@ -74,192 +90,32 @@ trackExistingTableOrViewP1 (TrackTable vn) = do
throw400 NotSupported $ "function with name " <> vn <<> " already exists"
trackExistingTableOrViewP2
:: (QErrM m, CacheRWM m, MonadTx m)
=> QualifiedTable -> Bool -> m EncJSON
trackExistingTableOrViewP2 vn isSystemDefined = do
:: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> TrackTable -> m EncJSON
trackExistingTableOrViewP2 (TrackTable tableName isEnum) = do
sc <- askSchemaCache
let defGCtx = scDefaultRemoteGCtx sc
GS.checkConflictingNode defGCtx $ GS.qualObjectToName vn
tables <- liftTx fetchTableCatalog
case tables of
[] -> throw400 NotExists $ "no such table/view exists in postgres : " <>> vn
[ti] -> addTableToCache ti
_ -> throw500 $ "more than one row found for: " <>> vn
liftTx $ Q.catchE defaultTxErrorHandler $ saveTableToCatalog vn
GS.checkConflictingNode defGCtx $ GS.qualObjectToName tableName
saveTableToCatalog tableName isEnum
buildSchemaCacheFor (MOTable tableName)
return successMsg
where
QualifiedObject sn tn = vn
mkTableInfo (cols, pCols, constraints, viewInfoM) =
let colMap = M.fromList $ flip map (Q.getAltJ cols) $
\c -> (fromPGCol $ pgiName c, FIColumn c)
in TableInfo vn isSystemDefined colMap mempty (Q.getAltJ constraints)
(Q.getAltJ pCols) (Q.getAltJ viewInfoM) mempty
fetchTableCatalog = map mkTableInfo <$>
Q.listQE defaultTxErrorHandler [Q.sql|
SELECT columns, primary_key_columns,
constraints, view_info
FROM hdb_catalog.hdb_table_info_agg
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) True
runTrackTableQ
:: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m)
:: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> TrackTable -> m EncJSON
runTrackTableQ q = do
trackExistingTableOrViewP1 q
trackExistingTableOrViewP2 (tName q) False
trackExistingTableOrViewP2 q
purgeDep :: (CacheRWM m, MonadTx m)
=> SchemaObjId -> m ()
purgeDep schemaObjId = case schemaObjId of
(SOTableObj tn (TOPerm rn pt)) -> do
liftTx $ dropPermFromCatalog tn rn pt
withPermType pt delPermFromCache rn tn
(SOTableObj qt (TORel rn)) -> do
liftTx $ delRelFromCatalog qt rn
delRelFromCache rn qt
(SOFunction qf) -> do
liftTx $ delFunctionFromCatalog qf
delFunctionFromCache qf
(SOTableObj qt (TOTrigger trn)) -> do
liftTx $ delEventTriggerFromCatalog trn
delEventTriggerFromCache qt trn
_ -> throw500 $
"unexpected dependent object : " <> reportSchemaObj schemaObjId
processTableChanges :: (MonadTx m, CacheRWM m)
=> TableInfo -> TableDiff -> m Bool
processTableChanges ti tableDiff = do
-- If table rename occurs then don't replace constraints and
-- process dropped/added columns, because schema reload happens eventually
sc <- askSchemaCache
let tn = tiName ti
withOldTabName = do
-- replace constraints
replaceConstraints tn
-- for all the dropped columns
procDroppedCols tn
-- for all added columns
procAddedCols tn
-- for all altered columns
procAlteredCols sc tn
withNewTabName newTN = do
let tnGQL = GS.qualObjectToName newTN
defGCtx = scDefaultRemoteGCtx sc
-- check for GraphQL schema conflicts on new name
GS.checkConflictingNode defGCtx tnGQL
void $ procAlteredCols sc tn
-- update new table in catalog
renameTableInCatalog newTN tn
return True
maybe withOldTabName withNewTabName mNewName
where
TableDiff mNewName droppedCols addedCols alteredCols _ constraints = tableDiff
replaceConstraints tn = flip modTableInCache tn $ \tInfo ->
return $ tInfo {tiUniqOrPrimConstraints = constraints}
procDroppedCols tn =
forM_ droppedCols $ \droppedCol ->
-- Drop the column from the cache
delColFromCache droppedCol tn
procAddedCols tn =
-- In the newly added columns check that there is no conflict with relationships
forM_ addedCols $ \pci@(PGColInfo colName _ _) ->
case M.lookup (fromPGCol colName) $ tiFieldInfoMap ti of
Just (FIRelationship _) ->
throw400 AlreadyExists $ "cannot add column " <> colName
<<> " in table " <> tn <<>
" as a relationship with the name already exists"
_ -> addColToCache colName pci tn
procAlteredCols sc tn = fmap or $ forM alteredCols $
\( PGColInfo oColName oColTy oNullable
, npci@(PGColInfo nColName nColTy nNullable)
) ->
if | oColName /= nColName -> do
renameColInCatalog oColName nColName tn ti
return True
| oColTy /= nColTy -> do
let colId = SOTableObj tn $ TOCol oColName
typeDepObjs = getDependentObjsWith (== DROnType) sc colId
-- Raise exception if any objects found which are dependant on column type
unless (null typeDepObjs) $ throw400 DependencyError $
"cannot change type of column " <> oColName <<> " in table "
<> tn <<> " because of the following dependencies : " <>
reportSchemaObjs typeDepObjs
-- Update column type in cache
updColInCache nColName npci tn
-- If any dependant permissions found with the column whose type
-- being altered is provided with a session variable,
-- then rebuild permission info and update the cache
let sessVarDepObjs =
getDependentObjsWith (== DRSessionVariable) sc colId
forM_ sessVarDepObjs $ \objId ->
case objId of
SOTableObj qt (TOPerm rn pt) -> rebuildPermInfo qt rn pt
_ -> throw500
"unexpected schema dependency found for altering column type"
return False
| oNullable /= nNullable -> do
updColInCache nColName npci tn
return False
| otherwise -> return False
delTableAndDirectDeps
:: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
liftTx $ Q.catchE defaultTxErrorHandler $ do
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_relationship"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_permission"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."event_triggers"
WHERE schema_name = $1 AND table_name = $2
|] (sn, tn) False
delTableFromCatalog qtn
delTableFromCache qtn
processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool
processSchemaChanges schemaDiff = do
-- Purge the dropped tables
mapM_ delTableAndDirectDeps droppedTables
sc <- askSchemaCache
fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do
ti <- case M.lookup oldQtn $ scTables sc of
Just ti -> return ti
Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
processTableChanges ti tableDiff
where
SchemaDiff droppedTables alteredTables = schemaDiff
data UntrackTable =
UntrackTable
{ utTable :: !QualifiedTable
, utCascade :: !(Maybe Bool)
} deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable)
runSetExistingTableIsEnumQ
:: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> SetTableIsEnum -> m EncJSON
runSetExistingTableIsEnumQ (SetTableIsEnum tableName isEnum) = do
adminOnly
void $ askTabInfo tableName -- assert that table is tracked
updateTableIsEnumInCatalog tableName isEnum
buildSchemaCacheFor (MOTable tableName)
return successMsg
unTrackExistingTableOrViewP1
:: (CacheRM m, UserInfoM m, QErrM m) => UntrackTable -> m ()
@ -269,7 +125,7 @@ unTrackExistingTableOrViewP1 (UntrackTable vn _) = do
case M.lookup vn (scTables rawSchemaCache) of
Just ti ->
-- Check if table/view is system defined
when (tiSystemDefined ti) $ throw400 NotSupported $
when (_tiSystemDefined ti) $ throw400 NotSupported $
vn <<> " is system defined, cannot untrack"
Nothing -> throw400 AlreadyUntracked $
"view/table already untracked : " <>> vn
@ -287,8 +143,8 @@ unTrackExistingTableOrViewP2 (UntrackTable qtn cascade) = do
-- Report back with an error if cascade is not set
when (indirectDeps /= [] && not (or cascade)) $ reportDepsExt indirectDeps []
-- Purge all the dependants from state
mapM_ purgeDep indirectDeps
-- Purge all the dependents from state
mapM_ purgeDependentObject indirectDeps
-- delete the table and its direct dependencies
delTableAndDirectDeps qtn
@ -306,331 +162,187 @@ runUntrackTableQ q = do
unTrackExistingTableOrViewP1 q
unTrackExistingTableOrViewP2 q
handleInconsistentObj
:: (QErrM m, CacheRWM m)
=> (T.Text -> InconsistentMetadataObj)
-> m ()
-> m ()
handleInconsistentObj f action =
action `catchError` \err -> do
sc <- askSchemaCache
let inconsObj = f $ qeError err
allInconsObjs = inconsObj:scInconsistentObjs sc
writeSchemaCache $ sc{scInconsistentObjs = allInconsObjs}
checkNewInconsistentMeta
:: (QErrM m)
=> SchemaCache -- old schema cache
-> SchemaCache -- new schema cache
-> m ()
checkNewInconsistentMeta oldSC newSC =
unless (null newInconsMetaObjects) $ do
let err = err500 Unexpected
"cannot continue due to newly found inconsistent metadata"
throwError err{qeInternal = Just $ toJSON newInconsMetaObjects}
where
oldInconsMeta = scInconsistentObjs oldSC
newInconsMeta = scInconsistentObjs newSC
newInconsMetaObjects = getDifference _moId newInconsMeta oldInconsMeta
buildSchemaCacheStrict
:: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> m ()
buildSchemaCacheStrict = do
buildSchemaCache
processTableChanges :: (MonadTx m, CacheRWM m)
=> TableInfo PGColumnInfo -> TableDiff -> m Bool
processTableChanges ti tableDiff = do
-- If table rename occurs then don't replace constraints and
-- process dropped/added columns, because schema reload happens eventually
sc <- askSchemaCache
let inconsObjs = scInconsistentObjs sc
unless (null inconsObjs) $ do
let err = err400 Unexpected "cannot continue due to inconsistent metadata"
throwError err{qeInternal = Just $ toJSON inconsObjs}
let tn = _tiName ti
withOldTabName = do
replaceConstraints tn
procDroppedCols tn
procAddedCols tn
procAlteredCols sc tn
buildSchemaCache
:: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> m ()
buildSchemaCache = buildSchemaCacheG True
buildSCWithoutSetup
:: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> m ()
buildSCWithoutSetup = buildSchemaCacheG False
buildSchemaCacheG
:: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> Bool -> m ()
buildSchemaCacheG withSetup = do
-- clean hdb_views
when withSetup $ liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
-- reset the current schemacache
writeSchemaCache emptySchemaCache
sqlGenCtx <- askSQLGenCtx
-- fetch all catalog metadata
CatalogMetadata tables relationships permissions
eventTriggers remoteSchemas functions fkeys' allowlistDefs
<- liftTx fetchCatalogData
let fkeys = HS.fromList fkeys'
-- tables
forM_ tables $ \ct -> do
let qt = _ctTable ct
isSysDef = _ctSystemDefined ct
tableInfoM = _ctInfo ct
mkInconsObj = InconsistentMetadataObj (MOTable qt)
MOTTable $ toJSON $ TrackTable qt
modifyErr (\e -> "table " <> qt <<> "; " <> e) $
handleInconsistentObj mkInconsObj $ do
ti <- onNothing tableInfoM $ throw400 NotExists $
"no such table/view exists in postgres : " <>> qt
addTableToCache $ ti{tiSystemDefined = isSysDef}
-- relationships
forM_ relationships $ \(CatalogRelation qt rn rt rDef cmnt) -> do
let objId = MOTableObj qt $ MTORel rn rt
def = toJSON $ WithTable qt $ RelDef rn rDef cmnt
mkInconsObj = InconsistentMetadataObj objId (MOTRel rt) def
modifyErr (\e -> "table " <> qt <<> "; rel " <> rn <<> "; " <> e) $
handleInconsistentObj mkInconsObj $
case rt of
ObjRel -> do
using <- decodeValue rDef
let relDef = RelDef rn using Nothing
validateObjRel qt relDef
objRelP2Setup qt fkeys relDef
ArrRel -> do
using <- decodeValue rDef
let relDef = RelDef rn using Nothing
validateArrRel qt relDef
arrRelP2Setup qt fkeys relDef
-- permissions
forM_ permissions $ \(CatalogPermission qt rn pt pDef cmnt) -> do
let objId = MOTableObj qt $ MTOPerm rn pt
def = toJSON $ WithTable qt $ PermDef rn pDef cmnt
mkInconsObj = InconsistentMetadataObj objId (MOTPerm pt) def
modifyErr (\e -> "table " <> qt <<> "; role " <> rn <<> "; " <> e) $
handleInconsistentObj mkInconsObj $
case pt of
PTInsert -> permHelper withSetup sqlGenCtx qt rn pDef PAInsert
PTSelect -> permHelper withSetup sqlGenCtx qt rn pDef PASelect
PTUpdate -> permHelper withSetup sqlGenCtx qt rn pDef PAUpdate
PTDelete -> permHelper withSetup sqlGenCtx qt rn pDef PADelete
-- event triggers
forM_ eventTriggers $ \(CatalogEventTrigger qt trn configuration) -> do
let objId = MOTableObj qt $ MTOTrigger trn
def = object ["table" .= qt, "configuration" .= configuration]
mkInconsObj = InconsistentMetadataObj objId MOTEventTrigger def
handleInconsistentObj mkInconsObj $ do
etc <- decodeValue configuration
subTableP2Setup qt etc
allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
when withSetup $ liftTx $
mkAllTriggersQ trn qt allCols (stringifyNum sqlGenCtx) (etcDefinition etc)
-- sql functions
forM_ functions $ \(CatalogFunction qf rawfiM) -> do
let def = toJSON $ TrackFunction qf
mkInconsObj =
InconsistentMetadataObj (MOFunction qf) MOTFunction def
modifyErr (\e -> "function " <> qf <<> "; " <> e) $
handleInconsistentObj mkInconsObj $ do
rawfi <- onNothing rawfiM $
throw400 NotExists $ "no such function exists in postgres : " <>> qf
trackFunctionP2Setup qf rawfi
-- allow list
replaceAllowlist $ concatMap _cdQueries allowlistDefs
-- build GraphQL context with tables and functions
GS.buildGCtxMapPG
-- remote schemas
forM_ remoteSchemas resolveSingleRemoteSchema
where
permHelper setup sqlGenCtx qt rn pDef pa = do
qCtx <- mkAdminQCtx sqlGenCtx <$> askSchemaCache
perm <- decodeValue pDef
let permDef = PermDef rn perm Nothing
createPerm = WithTable qt permDef
(permInfo, deps) <- liftP1WithQCtx qCtx $ createPermP1 createPerm
when setup $ addPermP2Setup qt permDef permInfo
addPermToCache qt rn pa permInfo deps
-- p2F qt rn p1Res
resolveSingleRemoteSchema rs = do
let AddRemoteSchemaQuery name _ _ = rs
mkInconsObj = InconsistentMetadataObj (MORemoteSchema name)
MOTRemoteSchema (toJSON rs)
handleInconsistentObj mkInconsObj $ do
rsCtx <- addRemoteSchemaP2Setup rs
sc <- askSchemaCache
let gCtxMap = scGCtxMap sc
withNewTabName newTN = do
let tnGQL = GS.qualObjectToName newTN
defGCtx = scDefaultRemoteGCtx sc
rGCtx = convRemoteGCtx $ rscGCtx rsCtx
mergedGCtxMap <- mergeRemoteSchema gCtxMap rGCtx
mergedDefGCtx <- mergeGCtx defGCtx rGCtx
writeSchemaCache sc { scGCtxMap = mergedGCtxMap
, scDefaultRemoteGCtx = mergedDefGCtx
}
-- check for GraphQL schema conflicts on new name
GS.checkConflictingNode defGCtx tnGQL
void $ procAlteredCols sc tn
-- update new table in catalog
renameTableInCatalog newTN tn
return True
fetchCatalogData :: Q.TxE QErr CatalogMetadata
fetchCatalogData =
(Q.getAltJ . runIdentity . Q.getRow) <$> Q.withQE defaultTxErrorHandler
$(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True
data RunSQL
= RunSQL
{ rSql :: T.Text
, rCascade :: !(Maybe Bool)
, rCheckMetadataConsistency :: !(Maybe Bool)
} deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 1 snakeCase){omitNothingFields=True} ''RunSQL)
data RunSQLRes
= RunSQLRes
{ rrResultType :: !T.Text
, rrResult :: !Value
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''RunSQLRes)
instance Q.FromRes RunSQLRes where
fromRes (Q.ResultOkEmpty _) =
return $ RunSQLRes "CommandOk" Null
fromRes (Q.ResultOkData res) = do
csvRows <- resToCSV res
return $ RunSQLRes "TuplesOk" $ toJSON csvRows
execRawSQL :: (MonadTx m) => T.Text -> m EncJSON
execRawSQL =
fmap (encJFromJValue @RunSQLRes) .
liftTx . Q.multiQE rawSqlErrHandler . Q.fromText
where
rawSqlErrHandler txe =
let e = err400 PostgresError "query execution failed"
in e {qeInternal = Just $ toJSON txe}
execWithMDCheck
:: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> RunSQL -> m EncJSON
execWithMDCheck (RunSQL t cascade _) = do
-- Drop hdb_views so no interference is caused to the sql query
liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
-- Get all the metadata before the SQL query; it is filtered down to tracked tables below
oldMetaU <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
oldFuncMetaU <-
liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta
-- Run the SQL
res <- execRawSQL t
-- Get the metadata after the sql query
newMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchTableMeta
newFuncMeta <- liftTx $ Q.catchE defaultTxErrorHandler fetchFunctionMeta
sc <- askSchemaCache
let existingTables = M.keys $ scTables sc
oldMeta = flip filter oldMetaU $ \tm -> tmTable tm `elem` existingTables
schemaDiff = getSchemaDiff oldMeta newMeta
existingFuncs = M.keys $ scFunctions sc
oldFuncMeta = flip filter oldFuncMetaU $ \fm -> funcFromMeta fm `elem` existingFuncs
FunctionDiff droppedFuncs alteredFuncs = getFuncDiff oldFuncMeta newFuncMeta
overloadedFuncs = getOverloadedFuncs existingFuncs newFuncMeta
-- Do not allow overloading functions
unless (null overloadedFuncs) $
throw400 NotSupported $ "the following tracked function(s) cannot be overloaded: "
<> reportFuncs overloadedFuncs
indirectDeps <- getSchemaChangeDeps schemaDiff
-- Report back with an error if cascade is not set
when (indirectDeps /= [] && not (or cascade)) $ reportDepsExt indirectDeps []
-- Purge all the indirect dependents from state
mapM_ purgeDep indirectDeps
-- Purge all dropped functions
let purgedFuncs = flip mapMaybe indirectDeps $ \dep ->
case dep of
SOFunction qf -> Just qf
_ -> Nothing
forM_ (droppedFuncs \\ purgedFuncs) $ \qf -> do
liftTx $ delFunctionFromCatalog qf
delFunctionFromCache qf
-- Process altered functions
forM_ alteredFuncs $ \(qf, newTy) ->
when (newTy == FTVOLATILE) $
throw400 NotSupported $
"type of function " <> qf <<> " is altered to \"VOLATILE\" which is not supported now"
-- update the schema cache and hdb_catalog with the changes
reloadRequired <- processSchemaChanges schemaDiff
let withReload = do -- in case of any rename
buildSchemaCache
newSC <- askSchemaCache
checkNewInconsistentMeta sc newSC
withoutReload = do
postSc <- askSchemaCache
-- recreate the insert permission infra
forM_ (M.elems $ scTables postSc) $ \ti -> do
let tn = tiName ti
forM_ (M.elems $ tiRolePermInfoMap ti) $ \rpi ->
maybe (return ()) (liftTx . buildInsInfra tn) $ _permIns rpi
strfyNum <- stringifyNum <$> askSQLGenCtx
-- recreate triggers
forM_ (M.elems $ scTables postSc) $ \ti -> do
let tn = tiName ti
cols = getCols $ tiFieldInfoMap ti
forM_ (M.toList $ tiEventTriggerInfoMap ti) $ \(trn, eti) -> do
let fullspec = etiOpsDef eti
liftTx $ mkAllTriggersQ trn tn cols strfyNum fullspec
bool withoutReload withReload reloadRequired
return res
where
reportFuncs = T.intercalate ", " . map dquoteTxt
isAltrDropReplace :: QErrM m => T.Text -> m Bool
isAltrDropReplace = either throwErr return . matchRegex regex False
where
throwErr s = throw500 $ "compiling regex failed: " <> T.pack s
regex = "alter|drop|replace|create function"
runRunSQL
:: (QErrM m, UserInfoM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
=> RunSQL -> m EncJSON
runRunSQL q@(RunSQL t _ mChkMDCnstcy) = do
adminOnly
isMDChkNeeded <- maybe (isAltrDropReplace t) return mChkMDCnstcy
bool (execRawSQL t) (execWithMDCheck q) isMDChkNeeded
-- Should be used only after checking the status
resToCSV :: PQ.Result -> ExceptT T.Text IO [[T.Text]]
resToCSV r = do
nr <- liftIO $ PQ.ntuples r
nc <- liftIO $ PQ.nfields r
hdr <- forM [0..pred nc] $ \ic -> do
colNameBS <- liftIO $ PQ.fname r ic
maybe (return "unknown") decodeBS colNameBS
rows <- forM [0..pred nr] $ \ir ->
forM [0..pred nc] $ \ic -> do
cellValBS <- liftIO $ PQ.getvalue r ir ic
maybe (return "NULL") decodeBS cellValBS
return $ hdr:rows
maybe withOldTabName withNewTabName mNewName
where
decodeBS = either (throwError . T.pack . show) return . TE.decodeUtf8'
TableDiff mNewName droppedCols addedCols alteredCols _ constraints = tableDiff
replaceConstraints tn = flip modTableInCache tn $ \tInfo ->
return $ tInfo {_tiUniqOrPrimConstraints = constraints}
procDroppedCols tn =
forM_ droppedCols $ \droppedCol ->
-- Drop the column from the cache
delColFromCache droppedCol tn
procAddedCols tn =
-- For the newly added columns, check that there is no conflict with existing relationships
forM_ addedCols $ \rawInfo@(PGRawColumnInfo colName _ _ _) ->
case M.lookup (fromPGCol colName) $ _tiFieldInfoMap ti of
Just (FIRelationship _) ->
throw400 AlreadyExists $ "cannot add column " <> colName
<<> " in table " <> tn <<>
" as a relationship with the name already exists"
_ -> do
info <- processColumnInfoUsingCache tn rawInfo
addColToCache colName info tn
procAlteredCols sc tn = fmap or $ forM alteredCols $
\( PGRawColumnInfo oldName oldType _ _
, newRawInfo@(PGRawColumnInfo newName newType _ _) ) -> do
let performColumnUpdate = do
newInfo <- processColumnInfoUsingCache tn newRawInfo
updColInCache newName newInfo tn
if | oldName /= newName -> renameColInCatalog oldName newName tn ti $> True
| oldType /= newType -> do
let colId = SOTableObj tn $ TOCol oldName
typeDepObjs = getDependentObjsWith (== DROnType) sc colId
unless (null typeDepObjs) $ throw400 DependencyError $
"cannot change type of column " <> oldName <<> " in table "
<> tn <<> " because of the following dependencies : " <>
reportSchemaObjs typeDepObjs
performColumnUpdate
-- If any permissions that depend on the column whose type is being altered
-- use a session variable, rebuild the permission info and update the cache
let sessVarDepObjs = getDependentObjsWith (== DRSessionVariable) sc colId
forM_ sessVarDepObjs $ \case
SOTableObj qt (TOPerm rn pt) -> rebuildPermInfo qt rn pt
_ -> throw500 "unexpected schema dependency found for altering column type"
pure False
| otherwise -> performColumnUpdate $> False
delTableAndDirectDeps
:: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
liftTx $ Q.catchE defaultTxErrorHandler $ do
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_relationship"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."hdb_permission"
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False
Q.unitQ [Q.sql|
DELETE FROM "hdb_catalog"."event_triggers"
WHERE schema_name = $1 AND table_name = $2
|] (sn, tn) False
deleteTableFromCatalog qtn
delTableFromCache qtn
-- | Builds an initial @'TableCache' 'PGColumnInfo'@ from catalog information. Does not fill in
-- '_tiRolePermInfoMap' or '_tiEventTriggerInfoMap' at all, and '_tiFieldInfoMap' only contains
-- columns, not relationships; those pieces of information are filled in by later stages.
buildTableCache
:: forall m. (MonadTx m, CacheRWM m)
=> [CatalogTable] -> m (TableCache PGColumnInfo)
buildTableCache = processTableCache <=< buildRawTableCache
where
withTable name = withSchemaObject $
InconsistentMetadataObj (MOTable name) MOTTable (toJSON name)
-- Step 1: Build the raw table cache from metadata information.
buildRawTableCache :: [CatalogTable] -> m (TableCache PGRawColumnInfo)
buildRawTableCache catalogTables = fmap (M.fromList . catMaybes) . for catalogTables $
\(CatalogTable name isSystemDefined isEnum maybeInfo) -> withTable name $ do
catalogInfo <- onNothing maybeInfo $
throw400 NotExists $ "no such table/view exists in postgres: " <>> name
let CatalogTableInfo columns constraints primaryKeyColumnNames viewInfo = catalogInfo
columnFields = M.fromList . flip map columns $ \column ->
(fromPGCol $ prciName column, FIColumn column)
primaryKeyColumns = flip filter columns $ \column ->
prciName column `elem` primaryKeyColumnNames
fetchEnumValues = fetchAndValidateEnumValues name primaryKeyColumns columns
maybeEnumValues <- if isEnum then Just <$> fetchEnumValues else pure Nothing
let info = TableInfo
{ _tiName = name
, _tiSystemDefined = isSystemDefined
, _tiFieldInfoMap = columnFields
, _tiRolePermInfoMap = mempty
, _tiUniqOrPrimConstraints = constraints
, _tiPrimaryKeyCols = primaryKeyColumnNames
, _tiViewInfo = viewInfo
, _tiEventTriggerInfoMap = mempty
, _tiEnumValues = maybeEnumValues }
pure (name, info)
-- Step 2: Process the raw table cache to replace Postgres column types with logical column
-- types.
processTableCache :: TableCache PGRawColumnInfo -> m (TableCache PGColumnInfo)
processTableCache rawTables = fmap (M.mapMaybe id) . for rawTables $ \rawInfo -> do
let tableName = _tiName rawInfo
withTable tableName $ rawInfo
& tiFieldInfoMap.traverse._FIColumn %%~ processColumnInfo enumTables tableName
where
enumTables = M.mapMaybe _tiEnumValues rawTables
-- | “Processes” a 'PGRawColumnInfo' into a 'PGColumnInfo' by resolving its type using a map of known
-- enum tables.
processColumnInfo
:: (QErrM m)
=> M.HashMap QualifiedTable EnumValues -- ^ known enum tables
-> QualifiedTable -- ^ the table this column belongs to
-> PGRawColumnInfo -- ^ the column's raw information
-> m PGColumnInfo
processColumnInfo enumTables tableName rawInfo = do
resolvedType <- resolveColumnType
pure PGColumnInfo
{ pgiName = prciName rawInfo
, pgiType = resolvedType
, pgiIsNullable = prciIsNullable rawInfo }
where
resolveColumnType =
case prciReferences rawInfo of
-- no referenced tables? definitely not an enum
[] -> pure $ PGColumnScalar (prciType rawInfo)
-- one referenced table? might be an enum, so check if the referenced table is an enum
[referencedTableName] -> pure $ M.lookup referencedTableName enumTables & maybe
(PGColumnScalar $ prciType rawInfo)
(PGColumnEnumReference . EnumReference referencedTableName)
-- multiple referenced tables? we could check if any of them are enums, but the schema is
-- strange, so let's just reject it
referencedTables -> throw400 ConstraintViolation
$ "cannot handle exotic schema: column " <> prciName rawInfo <<> " in table "
<> tableName <<> " references multiple foreign tables ("
<> T.intercalate ", " (map dquote referencedTables) <> ")?"
-- | Like 'processColumnInfo', but uses the information in the current schema cache to resolve a
-- column's type.
processColumnInfoUsingCache :: (CacheRM m, QErrM m) => QualifiedTable -> PGRawColumnInfo -> m PGColumnInfo
processColumnInfoUsingCache tableName rawInfo = do
tables <- scTables <$> askSchemaCache
processColumnInfo (M.mapMaybe _tiEnumValues tables) tableName rawInfo

View File

@ -63,7 +63,7 @@ mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) =
validateCountQWith
:: (UserInfoM m, QErrM m, CacheRM m)
=> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> CountQuery
-> m CountQueryP1
validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do
@ -73,7 +73,7 @@ validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do
selPerm <- modifyErr (<> selNecessaryMsg) $
askSelPermInfo tableInfo
let colInfoMap = tiFieldInfoMap tableInfo
let colInfoMap = _tiFieldInfoMap tableInfo
forM_ mDistCols $ \distCols -> do
let distColAsrns = [ checkSelOnCol selPerm

View File

@ -30,7 +30,7 @@ data AnnDelG v
{ dqp1Table :: !QualifiedTable
, dqp1Where :: !(AnnBoolExp v, AnnBoolExp v)
, dqp1MutFlds :: !(MutFldsG v)
, dqp1AllCols :: ![PGColInfo]
, dqp1AllCols :: ![PGColumnInfo]
} deriving (Show, Eq)
traverseAnnDel
@ -60,7 +60,7 @@ mkDeleteCTE (AnnDel tn (fltr, wc) _ _) =
validateDeleteQWith
:: (UserInfoM m, QErrM m, CacheRM m)
=> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> DeleteQuery
-> m AnnDel
validateDeleteQWith sessVarBldr prepValBldr
@ -69,7 +69,7 @@ validateDeleteQWith sessVarBldr prepValBldr
-- If table is view then check if it is deletable
mutableView tableName viIsDeletable
(tiViewInfo tableInfo) "deletable"
(_tiViewInfo tableInfo) "deletable"
-- Check if the role has delete permissions
delPerm <- askDelPermInfo tableInfo
@ -81,7 +81,7 @@ validateDeleteQWith sessVarBldr prepValBldr
selPerm <- modifyErr (<> selNecessaryMsg) $
askSelPermInfo tableInfo
let fieldInfoMap = tiFieldInfoMap tableInfo
let fieldInfoMap = _tiFieldInfoMap tableInfo
allCols = getCols fieldInfoMap
-- convert the returning cols into sql returning exp

View File

@ -39,7 +39,7 @@ data InsertQueryP1
, iqp1Tuples :: ![[S.SQLExp]]
, iqp1Conflict :: !(Maybe ConflictClauseP1)
, iqp1MutFlds :: !MutFlds
, iqp1AllCols :: ![PGColInfo]
, iqp1AllCols :: ![PGColumnInfo]
} deriving (Show, Eq)
mkInsertCTE :: InsertQueryP1 -> S.CTE
@ -64,10 +64,10 @@ toSQLConflict conflict = case conflict of
convObj
:: (UserInfoM m, QErrM m)
=> (PGColType -> Value -> m S.SQLExp)
=> (PGColumnType -> Value -> m S.SQLExp)
-> HM.HashMap PGCol S.SQLExp
-> HM.HashMap PGCol S.SQLExp
-> FieldInfoMap
-> FieldInfoMap PGColumnInfo
-> InsObj
-> m ([PGCol], [S.SQLExp])
convObj prepFn defInsVals setInsVals fieldInfoMap insObj = do
@ -99,7 +99,7 @@ validateInpCols inpCols updColsPerm = forM_ inpCols $ \inpCol ->
buildConflictClause
:: (UserInfoM m, QErrM m)
=> SessVarBldr m
-> TableInfo
-> TableInfo PGColumnInfo
-> [PGCol]
-> OnConflict
-> m ConflictClauseP1
@ -131,8 +131,8 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
(Just _, Just _, _) -> throw400 UnexpectedPayload
"'constraint' and 'constraint_on' cannot be set at a time"
where
fieldInfoMap = tiFieldInfoMap tableInfo
toSQLBool = toSQLBoolExp (S.mkQual $ tiName tableInfo)
fieldInfoMap = _tiFieldInfoMap tableInfo
toSQLBool = toSQLBoolExp (S.mkQual $ _tiName tableInfo)
validateCols c = do
let targetcols = getPGCols c
@ -140,11 +140,11 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
\pgCol -> askPGType fieldInfoMap pgCol ""
validateConstraint c = do
let tableConsNames = tiUniqOrPrimConstraints tableInfo
let tableConsNames = _tiUniqOrPrimConstraints tableInfo
withPathK "constraint" $
unless (c `elem` tableConsNames) $
throw400 Unexpected $ "constraint " <> getConstraintTxt c
<<> " for table " <> tiName tableInfo
<<> " for table " <> _tiName tableInfo
<<> " does not exist"
getUpdPerm = do
@ -160,7 +160,7 @@ convInsertQuery
:: (UserInfoM m, QErrM m, CacheRM m)
=> (Value -> m [InsObj])
-> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> InsertQuery
-> m InsertQueryP1
convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRetCols) = do
@ -172,7 +172,7 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet
-- If table is view then check if it is insertable
mutableView tableName viIsInsertable
(tiViewInfo tableInfo) "insertable"
(_tiViewInfo tableInfo) "insertable"
-- Check if the role has insert permissions
insPerm <- askInsPermInfo tableInfo
@ -180,7 +180,7 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet
-- Check if all dependent headers are present
validateHeaders $ ipiRequiredHeaders insPerm
let fieldInfoMap = tiFieldInfoMap tableInfo
let fieldInfoMap = _tiFieldInfoMap tableInfo
setInsVals = ipiSet insPerm
-- convert the returning cols into sql returning exp

View File

@ -1,22 +1,22 @@
module Hasura.RQL.DML.Internal where
import qualified Database.PG.Query as Q
import qualified Database.PG.Query.Connection as Q
import qualified Hasura.SQL.DML as S
import qualified Database.PG.Query as Q
import qualified Hasura.SQL.DML as S
import Hasura.Prelude
import Hasura.RQL.GBoolExp
import Hasura.RQL.Types
import Hasura.SQL.Error
import Hasura.SQL.Types
import Hasura.SQL.Value
import Control.Lens
import Data.Aeson.Types
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.Sequence as DS
import qualified Data.Text as T
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as HS
import qualified Data.Sequence as DS
import qualified Data.Text as T
newtype DMLP1 a
= DMLP1 {unDMLP1 :: StateT (DS.Seq Q.PrepArg) P1 a}
@ -41,13 +41,13 @@ instance UserInfoM DMLP1 where
instance HasSQLGenCtx DMLP1 where
askSQLGenCtx = DMLP1 $ lift askSQLGenCtx
mkAdminRolePermInfo :: TableInfo -> RolePermInfo
mkAdminRolePermInfo :: TableInfo PGColumnInfo -> RolePermInfo
mkAdminRolePermInfo ti =
RolePermInfo (Just i) (Just s) (Just u) (Just d)
where
pgCols = map pgiName $ getCols $ tiFieldInfoMap ti
pgCols = map pgiName $ getCols $ _tiFieldInfoMap ti
tn = tiName ti
tn = _tiName ti
i = InsPermInfo (HS.fromList pgCols) tn annBoolExpTrue M.empty []
s = SelPermInfo (HS.fromList pgCols) tn annBoolExpTrue
Nothing True []
@ -57,14 +57,14 @@ mkAdminRolePermInfo ti =
askPermInfo'
:: (UserInfoM m)
=> PermAccessor c
-> TableInfo
-> TableInfo PGColumnInfo
-> m (Maybe c)
askPermInfo' pa tableInfo = do
roleName <- askCurRole
let mrpi = getRolePermInfo roleName
return $ mrpi >>= (^. permAccToLens pa)
where
rpim = tiRolePermInfoMap tableInfo
rpim = _tiRolePermInfoMap tableInfo
getRolePermInfo roleName
| roleName == adminRole = Just $ mkAdminRolePermInfo tableInfo
| otherwise = M.lookup roleName rpim
@ -72,7 +72,7 @@ askPermInfo' pa tableInfo = do
askPermInfo
:: (UserInfoM m, QErrM m)
=> PermAccessor c
-> TableInfo
-> TableInfo PGColumnInfo
-> m c
askPermInfo pa tableInfo = do
roleName <- askCurRole
@ -80,38 +80,38 @@ askPermInfo pa tableInfo = do
case mPermInfo of
Just c -> return c
Nothing -> throw400 PermissionDenied $ mconcat
[ pt <> " on " <>> tiName tableInfo
[ pt <> " on " <>> _tiName tableInfo
, " for role " <>> roleName
, " is not allowed. "
]
where
pt = permTypeToCode $ permAccToType pa
isTabUpdatable :: RoleName -> TableInfo -> Bool
isTabUpdatable :: RoleName -> TableInfo PGColumnInfo -> Bool
isTabUpdatable role ti
| role == adminRole = True
| otherwise = isJust $ M.lookup role rpim >>= _permUpd
where
rpim = tiRolePermInfoMap ti
rpim = _tiRolePermInfoMap ti
askInsPermInfo
:: (UserInfoM m, QErrM m)
=> TableInfo -> m InsPermInfo
=> TableInfo PGColumnInfo -> m InsPermInfo
askInsPermInfo = askPermInfo PAInsert
askSelPermInfo
:: (UserInfoM m, QErrM m)
=> TableInfo -> m SelPermInfo
=> TableInfo PGColumnInfo -> m SelPermInfo
askSelPermInfo = askPermInfo PASelect
askUpdPermInfo
:: (UserInfoM m, QErrM m)
=> TableInfo -> m UpdPermInfo
=> TableInfo PGColumnInfo -> m UpdPermInfo
askUpdPermInfo = askPermInfo PAUpdate
askDelPermInfo
:: (UserInfoM m, QErrM m)
=> TableInfo -> m DelPermInfo
=> TableInfo PGColumnInfo -> m DelPermInfo
askDelPermInfo = askPermInfo PADelete
verifyAsrns :: (MonadError QErr m) => [a -> m ()] -> [a] -> m ()
@ -142,27 +142,27 @@ checkPermOnCol pt allowedCols pgCol = do
]
binRHSBuilder
:: PGColType -> Value -> DMLP1 S.SQLExp
:: PGColumnType -> Value -> DMLP1 S.SQLExp
binRHSBuilder colType val = do
preparedArgs <- get
binVal <- runAesonParser (convToBin colType) val
put (preparedArgs DS.|> binVal)
return $ toPrepParam (DS.length preparedArgs + 1) colType
scalarValue <- parsePGScalarValue colType val
put (preparedArgs DS.|> toBinaryValue scalarValue)
return $ toPrepParam (DS.length preparedArgs + 1) (pstType scalarValue)
fetchRelTabInfo
:: (QErrM m, CacheRM m)
=> QualifiedTable
-> m TableInfo
-> m (TableInfo PGColumnInfo)
fetchRelTabInfo refTabName =
-- Internal error
modifyErrAndSet500 ("foreign " <> ) $ askTabInfo refTabName
type SessVarBldr m = PgType -> SessVar -> m S.SQLExp
type SessVarBldr m = PGType PGScalarType -> SessVar -> m S.SQLExp
fetchRelDet
:: (UserInfoM m, QErrM m, CacheRM m)
=> RelName -> QualifiedTable
-> m (FieldInfoMap, SelPermInfo)
-> m (FieldInfoMap PGColumnInfo, SelPermInfo)
fetchRelDet relName refTabName = do
roleName <- askCurRole
-- Internal error
@ -171,7 +171,7 @@ fetchRelDet relName refTabName = do
refSelPerm <- modifyErr (relPermErr refTabName roleName) $
askSelPermInfo refTabInfo
return (tiFieldInfoMap refTabInfo, refSelPerm)
return (_tiFieldInfoMap refTabInfo, refSelPerm)
where
relPermErr rTable roleName _ =
mconcat
@ -188,7 +188,7 @@ checkOnColExp
-> AnnBoolExpFldSQL
-> m AnnBoolExpFldSQL
checkOnColExp spi sessVarBldr annFld = case annFld of
AVCol (PGColInfo cn _ _) _ -> do
AVCol (PGColumnInfo cn _ _) _ -> do
checkSelOnCol spi cn
return annFld
AVRel relInfo nesAnn -> do
@ -215,16 +215,16 @@ convPartialSQLExp f = \case
PSESessVar colTy sessVar -> f colTy sessVar
sessVarFromCurrentSetting
:: (Applicative f) => PgType -> SessVar -> f S.SQLExp
:: (Applicative f) => PGType PGScalarType -> SessVar -> f S.SQLExp
sessVarFromCurrentSetting pgType sessVar =
pure $ sessVarFromCurrentSetting' pgType sessVar
sessVarFromCurrentSetting' :: PgType -> SessVar -> S.SQLExp
sessVarFromCurrentSetting' :: PGType PGScalarType -> SessVar -> S.SQLExp
sessVarFromCurrentSetting' ty sessVar =
flip S.SETyAnn (S.mkTypeAnn ty) $
case ty of
PgTypeSimple baseTy -> withGeoVal baseTy sessVarVal
PgTypeArray _ -> sessVarVal
PGTypeScalar baseTy -> withGeoVal baseTy sessVarVal
PGTypeArray _ -> sessVarVal
where
curSess = S.SEUnsafe "current_setting('hasura.user')::json"
sessVarVal = S.SEOpApp (S.SQLOp "->>")
@ -241,41 +241,45 @@ checkSelPerm spi sessVarBldr =
convBoolExp
:: (UserInfoM m, QErrM m, CacheRM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> SelPermInfo
-> BoolExp
-> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> m AnnBoolExpSQL
convBoolExp cim spi be sessVarBldr prepValBldr = do
abe <- annBoolExp rhsParser cim be
checkSelPerm spi sessVarBldr abe
where
rhsParser pgType val = case pgType of
PgTypeSimple ty -> prepValBldr ty val
PgTypeArray ofTy -> do
-- for arrays we don't use the prepared builder
PGTypeScalar ty -> prepValBldr ty val
PGTypeArray ofTy -> do
-- for arrays, we don't use the prepared builder
vals <- runAesonParser parseJSON val
arrayExp <- S.SEArray <$> indexedForM vals (txtRHSBuilder ofTy)
return $ S.SETyAnn arrayExp $ S.mkTypeAnn pgType
WithScalarType scalarType scalarValues <- parsePGScalarValues ofTy vals
return $ S.SETyAnn
(S.SEArray $ map (toTxtValue . WithScalarType scalarType) scalarValues)
(S.mkTypeAnn $ PGTypeArray scalarType)
dmlTxErrorHandler :: Q.PGTxErr -> QErr
dmlTxErrorHandler p2Res =
case err of
Nothing -> defaultTxErrorHandler p2Res
Just (code, msg) -> err400 code msg
where err = simplifyError p2Res
dmlTxErrorHandler = mkTxErrorHandler $ \case
PGIntegrityConstraintViolation _ -> True
PGDataException _ -> True
PGSyntaxErrorOrAccessRuleViolation (Just (PGErrorSpecific code)) -> code `elem`
[ PGUndefinedObject
, PGInvalidColumnReference ]
_ -> False
toJSONableExp :: Bool -> PGColType -> S.SQLExp -> S.SQLExp
toJSONableExp :: Bool -> PGColumnType -> S.SQLExp -> S.SQLExp
toJSONableExp strfyNum colTy expn
| colTy == PGGeometry || colTy == PGGeography =
| isScalarColumnWhere isGeoType colTy =
S.SEFnApp "ST_AsGeoJSON"
[ expn
, S.SEUnsafe "15" -- max decimal digits
, S.SEUnsafe "4" -- to print out crs
] Nothing
`S.SETyAnn` S.jsonTypeAnn
| isBigNum colTy && strfyNum =
| isScalarColumnWhere isBigNum colTy && strfyNum =
expn `S.SETyAnn` S.textTypeAnn
| otherwise = expn
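Roughly, the effect of these guards on the generated SQL (a sketch, not verbatim output):
--   geometry / geography column:                 ST_AsGeoJSON(col, 15, 4) :: json
--   bignum column with stringify-numbers on:     col :: text
--   anything else (including enum references):   col, unchanged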
@ -287,45 +291,6 @@ validateHeaders depHeaders = do
unless (hdr `elem` map T.toLower headers) $
throw400 NotFound $ hdr <<> " header is expected but not found"
simplifyError :: Q.PGTxErr -> Maybe (Code, T.Text)
simplifyError txErr = do
stmtErr <- Q.getPGStmtErr txErr
codeMsg <- getPGCodeMsg stmtErr
extractError codeMsg
where
getPGCodeMsg pged =
(,) <$> Q.edStatusCode pged <*> Q.edMessage pged
extractError = \case
-- restrict violation
("23001", msg) ->
return (ConstraintViolation, "Can not delete or update due to data being referred. " <> msg)
-- not null violation
("23502", msg) ->
return (ConstraintViolation, "Not-NULL violation. " <> msg)
-- foreign key violation
("23503", msg) ->
return (ConstraintViolation, "Foreign key violation. " <> msg)
-- unique violation
("23505", msg) ->
return (ConstraintViolation, "Uniqueness violation. " <> msg)
-- check violation
("23514", msg) ->
return (PermissionError, "Check constraint violation. " <> msg)
-- invalid text representation
("22P02", msg) -> return (DataException, msg)
-- invalid parameter value
("22023", msg) -> return (DataException, msg)
-- no unique constraint on the columns
("42P10", _) ->
return (ConstraintError, "there is no unique or exclusion constraint on target column(s)")
-- no constraint
("42704", msg) -> return (ConstraintError, msg)
-- invalid input values
("22007", msg) -> return (DataException, msg)
-- invalid escape sequence
("22025", msg) -> return (BadRequest, msg)
_ -> Nothing
-- validate limit and offset int values
onlyPositiveInt :: MonadError QErr m => Int -> m ()
onlyPositiveInt i = when (i < 0) $ throw400 NotSupported

View File

@ -27,7 +27,7 @@ data Mutation
{ _mTable :: !QualifiedTable
, _mQuery :: !(S.CTE, DS.Seq Q.PrepArg)
, _mFields :: !MutFlds
, _mCols :: ![PGColInfo]
, _mCols :: ![PGColumnInfo]
, _mStrfyNum :: !Bool
} deriving (Show, Eq)
@ -57,7 +57,7 @@ mutateAndSel (Mutation qt q mutFlds allCols strfyNum) = do
mutateAndFetchCols
:: QualifiedTable
-> [PGColInfo]
-> [PGColumnInfo]
-> (S.CTE, DS.Seq Q.PrepArg)
-> Bool
-> Q.TxE QErr MutateResp
@ -89,7 +89,7 @@ mutateAndFetchCols qt cols (cte, p) strfyNum =
mkSelCTEFromColVals
:: MonadError QErr m
=> QualifiedTable -> [PGColInfo] -> [ColVals] -> m S.CTE
=> QualifiedTable -> [PGColumnInfo] -> [ColVals] -> m S.CTE
mkSelCTEFromColVals qt allCols colVals =
S.CTESelect <$> case colVals of
[] -> return selNoRows
@ -108,7 +108,7 @@ mkSelCTEFromColVals qt allCols colVals =
let pgCol = pgiName ci
val <- onNothing (Map.lookup pgCol colVal) $
throw500 $ "column " <> pgCol <<> " not found in returning values"
runAesonParser (convToTxt (pgiType ci)) val
toTxtValue <$> parsePGScalarValue (pgiType ci) val
selNoRows =
S.mkSelect { S.selExtr = [S.selectStar]

View File

@ -50,24 +50,24 @@ hasNestedFld = any isNestedMutFld
FArr _ -> True
_ -> False
pgColsFromMutFld :: MutFld -> [(PGCol, PGColType)]
pgColsFromMutFld :: MutFld -> [(PGCol, PGColumnType)]
pgColsFromMutFld = \case
MCount -> []
MExp _ -> []
MRet selFlds ->
flip mapMaybe selFlds $ \(_, annFld) -> case annFld of
FCol (PGColInfo col colTy _) _ -> Just (col, colTy)
_ -> Nothing
FCol (PGColumnInfo col colTy _) _ -> Just (col, colTy)
_ -> Nothing
pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColType)]
pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColumnType)]
pgColsFromMutFlds = concatMap (pgColsFromMutFld . snd)
pgColsToSelFlds :: [PGColInfo] -> [(FieldName, AnnFld)]
pgColsToSelFlds :: [PGColumnInfo] -> [(FieldName, AnnFld)]
pgColsToSelFlds cols =
flip map cols $
\pgColInfo -> (fromPGCol $ pgiName pgColInfo, FCol pgColInfo Nothing)
mkDefaultMutFlds :: Maybe [PGColInfo] -> MutFlds
mkDefaultMutFlds :: Maybe [PGColumnInfo] -> MutFlds
mkDefaultMutFlds = \case
Nothing -> mutFlds
Just cols -> ("returning", MRet $ pgColsToSelFlds cols):mutFlds
@ -111,10 +111,10 @@ mkSelWith qt cte mutFlds singleObj strfyNum =
checkRetCols
:: (UserInfoM m, QErrM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> SelPermInfo
-> [PGCol]
-> m [PGColInfo]
-> m [PGColumnInfo]
checkRetCols fieldInfoMap selPermInfo cols = do
mapM_ (checkSelOnCol selPermInfo) cols
forM cols $ \col -> askPGColInfo fieldInfoMap col relInRetErr

View File

@ -30,7 +30,7 @@ import qualified Database.PG.Query as Q
import qualified Hasura.SQL.DML as S
convSelCol :: (UserInfoM m, QErrM m, CacheRM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> SelPermInfo
-> SelCol
-> m [ExtCol]
@ -50,7 +50,7 @@ convSelCol fieldInfoMap spi (SCStar wildcard) =
convWildcard
:: (UserInfoM m, QErrM m, CacheRM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> SelPermInfo
-> Wildcard
-> m [ExtCol]
@ -71,14 +71,14 @@ convWildcard fieldInfoMap (SelPermInfo cols _ _ _ _ _) wildcard =
mRelSelPerm <- askPermInfo' PASelect relTabInfo
forM mRelSelPerm $ \rspi -> do
rExtCols <- convWildcard (tiFieldInfoMap relTabInfo) rspi wc
rExtCols <- convWildcard (_tiFieldInfoMap relTabInfo) rspi wc
return $ ECRel relName Nothing $
SelectG rExtCols Nothing Nothing Nothing Nothing
relExtCols wc = mapM (mkRelCol wc) relColInfos
resolveStar :: (UserInfoM m, QErrM m, CacheRM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> SelPermInfo
-> SelectQ
-> m SelectQExt
@ -105,7 +105,7 @@ resolveStar fim spi (SelectG selCols mWh mOb mLt mOf) = do
convOrderByElem
:: (UserInfoM m, QErrM m, CacheRM m)
=> SessVarBldr m
-> (FieldInfoMap, SelPermInfo)
-> (FieldInfoMap PGColumnInfo, SelPermInfo)
-> OrderByCol
-> m AnnObCol
convOrderByElem sessVarBldr (flds, spi) = \case
@ -115,7 +115,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case
FIColumn colInfo -> do
checkSelOnCol spi (pgiName colInfo)
let ty = pgiType colInfo
if ty == PGGeography || ty == PGGeometry
if isScalarColumnWhere isGeoType ty
then throw400 UnexpectedPayload $ mconcat
[ fldName <<> " has type 'geometry'"
, " and cannot be used in order_by"
@ -145,11 +145,11 @@ convOrderByElem sessVarBldr (flds, spi) = \case
convSelectQ
:: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
=> FieldInfoMap -- Table information of current table
=> FieldInfoMap PGColumnInfo -- Table information of current table
-> SelPermInfo -- Additional select permission info
-> SelectQExt -- Given Select Query
-> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> m AnnSimpleSel
convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
@ -200,10 +200,10 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
convExtSimple
:: (UserInfoM m, QErrM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> SelPermInfo
-> PGCol
-> m PGColInfo
-> m PGColumnInfo
convExtSimple fieldInfoMap selPermInfo pgCol = do
checkSelOnCol selPermInfo pgCol
askPGColInfo fieldInfoMap pgCol relWhenPGErr
@ -212,12 +212,12 @@ convExtSimple fieldInfoMap selPermInfo pgCol = do
convExtRel
:: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> RelName
-> Maybe RelName
-> SelectQExt
-> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> m (Either ObjSel ArrSel)
convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
-- Point to the name key
@ -250,15 +250,15 @@ convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
convSelectQuery
:: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
=> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> SelectQuery
-> m AnnSimpleSel
convSelectQuery sessVarBldr prepArgBuilder (DMLQuery qt selQ) = do
tabInfo <- withPathK "table" $ askTabInfo qt
selPermInfo <- askSelPermInfo tabInfo
extSelQ <- resolveStar (tiFieldInfoMap tabInfo) selPermInfo selQ
extSelQ <- resolveStar (_tiFieldInfoMap tabInfo) selPermInfo selQ
validateHeaders $ spiRequiredHeaders selPermInfo
convSelectQ (tiFieldInfoMap tabInfo) selPermInfo
convSelectQ (_tiFieldInfoMap tabInfo) selPermInfo
extSelQ sessVarBldr prepArgBuilder
mkFuncSelectSimple

View File

@ -203,7 +203,7 @@ buildJsonObject pfx parAls arrRelCtx strfyNum flds =
ANIField (fldAls, arrSel)
in S.mkQIdenExp arrPfx fldAls
toSQLCol :: PGColInfo -> Maybe ColOp -> S.SQLExp
toSQLCol :: PGColumnInfo -> Maybe ColOp -> S.SQLExp
toSQLCol col colOpM =
toJSONableExp strfyNum (pgiType col) $ case colOpM of
Nothing -> colNameExp

View File

@ -49,7 +49,7 @@ data AnnAggOrdBy
deriving (Show, Eq)
data AnnObColG v
= AOCPG !PGColInfo
= AOCPG !PGColumnInfo
| AOCObj !RelInfo !(AnnBoolExp v) !(AnnObColG v)
| AOCAgg !RelInfo !(AnnBoolExp v) !AnnAggOrdBy
deriving (Show, Eq)
@ -121,7 +121,7 @@ data ColOp
} deriving (Show, Eq)
data AnnFldG v
= FCol !PGColInfo !(Maybe ColOp)
= FCol !PGColumnInfo !(Maybe ColOp)
| FObj !(ObjSelG v)
| FArr !(ArrSelG v)
| FExp !T.Text

View File

@ -36,7 +36,7 @@ data AnnUpdG v
-- however the session variable can still be
-- converted as desired
, uqp1MutFlds :: !(MutFldsG v)
, uqp1AllCols :: ![PGColInfo]
, uqp1AllCols :: ![PGColumnInfo]
} deriving (Show, Eq)
traverseAnnUpd
@ -67,9 +67,9 @@ mkUpdateCTE (AnnUpd tn setExps (permFltr, wc) _ _) =
convInc
:: (QErrM m)
=> (PGColType -> Value -> m S.SQLExp)
=> (PGColumnType -> Value -> m S.SQLExp)
-> PGCol
-> PGColType
-> PGColumnType
-> Value
-> m (PGCol, S.SQLExp)
convInc f col colType val = do
@ -78,9 +78,9 @@ convInc f col colType val = do
convMul
:: (QErrM m)
=> (PGColType -> Value -> m S.SQLExp)
=> (PGColumnType -> Value -> m S.SQLExp)
-> PGCol
-> PGColType
-> PGColumnType
-> Value
-> m (PGCol, S.SQLExp)
convMul f col colType val = do
@ -89,25 +89,25 @@ convMul f col colType val = do
convSet
:: (QErrM m)
=> (PGColType -> Value -> m S.SQLExp)
=> (PGColumnType -> Value -> m S.SQLExp)
-> PGCol
-> PGColType
-> PGColumnType
-> Value
-> m (PGCol, S.SQLExp)
convSet f col colType val = do
prepExp <- f colType val
return (col, prepExp)
convDefault :: (Monad m) => PGCol -> PGColType -> () -> m (PGCol, S.SQLExp)
convDefault :: (Monad m) => PGCol -> PGColumnType -> () -> m (PGCol, S.SQLExp)
convDefault col _ _ = return (col, S.SEUnsafe "DEFAULT")
convOp
:: (UserInfoM m, QErrM m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> [PGCol]
-> UpdPermInfo
-> [(PGCol, a)]
-> (PGCol -> PGColType -> a -> m (PGCol, S.SQLExp))
-> (PGCol -> PGColumnType -> a -> m (PGCol, S.SQLExp))
-> m [(PGCol, S.SQLExp)]
convOp fieldInfoMap preSetCols updPerm objs conv =
forM objs $ \(pgCol, a) -> do
@ -129,7 +129,7 @@ convOp fieldInfoMap preSetCols updPerm objs conv =
validateUpdateQueryWith
:: (UserInfoM m, QErrM m, CacheRM m)
=> SessVarBldr m
-> (PGColType -> Value -> m S.SQLExp)
-> (PGColumnType -> Value -> m S.SQLExp)
-> UpdateQuery
-> m AnnUpd
validateUpdateQueryWith sessVarBldr prepValBldr uq = do
@ -138,7 +138,7 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do
-- If it is view then check if it is updatable
mutableView tableName viIsUpdatable
(tiViewInfo tableInfo) "updatable"
(_tiViewInfo tableInfo) "updatable"
-- Check if the role has update permissions
updPerm <- askUpdPermInfo tableInfo
@ -150,7 +150,7 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do
selPerm <- modifyErr (<> selNecessaryMsg) $
askSelPermInfo tableInfo
let fieldInfoMap = tiFieldInfoMap tableInfo
let fieldInfoMap = _tiFieldInfoMap tableInfo
allCols = getCols fieldInfoMap
preSetObj = upiSet updPerm
preSetCols = M.keys preSetObj

View File

@ -2,14 +2,11 @@ module Hasura.RQL.GBoolExp
( toSQLBoolExp
, getBoolExpDeps
, annBoolExp
, txtRHSBuilder
, pgValParser
) where
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.SQL.Value
import qualified Hasura.SQL.DML as S
@ -21,16 +18,16 @@ import qualified Data.HashMap.Strict as M
import qualified Data.Text.Extended as T
type OpRhsParser m v =
PgType -> Value -> m v
PGType PGColumnType -> Value -> m v
-- | Represents a reference to a Postgres column, possibly cast an arbitrary
-- number of times. Used within 'parseOperationsExpression' for bookkeeping.
data ColumnReference
= ColumnReferenceColumn !PGColInfo
| ColumnReferenceCast !ColumnReference !PGColType
= ColumnReferenceColumn !PGColumnInfo
| ColumnReferenceCast !ColumnReference !PGColumnType
deriving (Show, Eq)
columnReferenceType :: ColumnReference -> PGColType
columnReferenceType :: ColumnReference -> PGColumnType
columnReferenceType = \case
ColumnReferenceColumn column -> pgiType column
ColumnReferenceCast _ targetType -> targetType
@ -40,14 +37,14 @@ instance DQuote ColumnReference where
ColumnReferenceColumn column ->
getPGColTxt $ pgiName column
ColumnReferenceCast reference targetType ->
dquoteTxt reference <> "::" <> T.pack (show targetType)
dquoteTxt reference <> "::" <> dquoteTxt targetType
parseOperationsExpression
:: forall m v
. (MonadError QErr m)
=> OpRhsParser m v
-> FieldInfoMap
-> PGColInfo
-> FieldInfoMap PGColumnInfo
-> PGColumnInfo
-> Value
-> m [OpExpG v]
parseOperationsExpression rhsParser fim columnInfo =
@ -59,7 +56,7 @@ parseOperationsExpression rhsParser fim columnInfo =
Object o -> mapM (parseOperation column) (M.toList o)
val -> pure . AEQ False <$> rhsParser columnType val
where
columnType = PgTypeSimple $ columnReferenceType column
columnType = PGTypeScalar $ columnReferenceType column
parseOperation :: ColumnReference -> (T.Text, Value) -> m (OpExpG v)
parseOperation column (opStr, val) = withPathK opStr $
@ -114,17 +111,17 @@ parseOperationsExpression rhsParser fim columnInfo =
"_is_null" -> parseIsNull
-- jsonb type
"_contains" -> jsonbOnlyOp $ AContains <$> parseOne
"$contains" -> jsonbOnlyOp $ AContains <$> parseOne
"_contained_in" -> jsonbOnlyOp $ AContainedIn <$> parseOne
"$contained_in" -> jsonbOnlyOp $ AContainedIn <$> parseOne
"_has_key" -> jsonbOnlyOp $ AHasKey <$> parseWithTy PGText
"$has_key" -> jsonbOnlyOp $ AHasKey <$> parseWithTy PGText
"_contains" -> guardType [PGJSONB] >> AContains <$> parseOne
"$contains" -> guardType [PGJSONB] >> AContains <$> parseOne
"_contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
"$contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
"_has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)
"$has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)
"_has_keys_any" -> jsonbOnlyOp $ AHasKeysAny <$> parseManyWithType PGText
"$has_keys_any" -> jsonbOnlyOp $ AHasKeysAny <$> parseManyWithType PGText
"_has_keys_all" -> jsonbOnlyOp $ AHasKeysAll <$> parseManyWithType PGText
"$has_keys_all" -> jsonbOnlyOp $ AHasKeysAll <$> parseManyWithType PGText
"_has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
"$has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
"_has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)
"$has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)
-- geometry types
"_st_contains" -> parseGeometryOp ASTContains
@ -177,12 +174,12 @@ parseOperationsExpression rhsParser fim columnInfo =
parseLt = ALT <$> parseOne -- <
parseGte = AGTE <$> parseOne -- >=
parseLte = ALTE <$> parseOne -- <=
parseLike = textOnlyOp colTy >> ALIKE <$> parseOne
parseNlike = textOnlyOp colTy >> ANLIKE <$> parseOne
parseIlike = textOnlyOp colTy >> AILIKE <$> parseOne
parseNilike = textOnlyOp colTy >> ANILIKE <$> parseOne
parseSimilar = textOnlyOp colTy >> ASIMILAR <$> parseOne
parseNsimilar = textOnlyOp colTy >> ANSIMILAR <$> parseOne
parseLike = guardType stringTypes >> ALIKE <$> parseOne
parseNlike = guardType stringTypes >> ANLIKE <$> parseOne
parseIlike = guardType stringTypes >> AILIKE <$> parseOne
parseNilike = guardType stringTypes >> ANILIKE <$> parseOne
parseSimilar = guardType stringTypes >> ASIMILAR <$> parseOne
parseNsimilar = guardType stringTypes >> ANSIMILAR <$> parseOne
parseIsNull = bool ANISNOTNULL ANISNULL -- is null
<$> parseVal
@ -199,7 +196,7 @@ parseOperationsExpression rhsParser fim columnInfo =
parsedCastOperations <-
forM (M.toList castOperations) $ \(targetTypeName, castedComparisons) -> do
let targetType = txtToPgColTy targetTypeName
castedColumn = ColumnReferenceCast column targetType
castedColumn = ColumnReferenceCast column (PGColumnScalar targetType)
checkValidCast targetType
parsedCastedComparisons <- withPathK targetTypeName $
parseOperations castedColumn castedComparisons
@ -207,31 +204,27 @@ parseOperationsExpression rhsParser fim columnInfo =
return . ACast $ M.fromList parsedCastOperations
checkValidCast targetType = case (colTy, targetType) of
(PGGeometry, PGGeography) -> return ()
(PGGeography, PGGeometry) -> return ()
(PGColumnScalar PGGeometry, PGGeography) -> return ()
(PGColumnScalar PGGeography, PGGeometry) -> return ()
_ -> throw400 UnexpectedPayload $
"cannot cast column of type " <> colTy <<> " to type " <>> targetType
jsonbOnlyOp m = case colTy of
PGJSONB -> m
ty -> throwError $ buildMsg ty [PGJSONB]
parseGeometryOp f =
geometryOp colTy >> f <$> parseOneNoSess colTy val
guardType [PGGeometry] >> f <$> parseOneNoSess colTy val
parseGeometryOrGeographyOp f =
geometryOrGeographyOp colTy >> f <$> parseOneNoSess colTy val
guardType geoTypes >> f <$> parseOneNoSess colTy val
parseSTDWithinObj = case colTy of
PGGeometry -> do
PGColumnScalar PGGeometry -> do
DWithinGeomOp distVal fromVal <- parseVal
dist <- withPathK "distance" $ parseOneNoSess PGFloat distVal
dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
from <- withPathK "from" $ parseOneNoSess colTy fromVal
return $ ASTDWithinGeom $ DWithinGeomOp dist from
PGGeography -> do
PGColumnScalar PGGeography -> do
DWithinGeogOp distVal fromVal sphVal <- parseVal
dist <- withPathK "distance" $ parseOneNoSess PGFloat distVal
dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
from <- withPathK "from" $ parseOneNoSess colTy fromVal
useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess PGBoolean sphVal
useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess (PGColumnScalar PGBoolean) sphVal
return $ ASTDWithinGeog $ DWithinGeogOp dist from useSpheroid
_ -> throwError $ buildMsg colTy [PGGeometry, PGGeography]
@ -246,40 +239,23 @@ parseOperationsExpression rhsParser fim columnInfo =
"incompatible column types : " <> column <<> ", " <>> rhsCol
else return rhsCol
geometryOp PGGeometry = return ()
geometryOp ty =
throwError $ buildMsg ty [PGGeometry]
geometryOrGeographyOp PGGeometry = return ()
geometryOrGeographyOp PGGeography = return ()
geometryOrGeographyOp ty =
throwError $ buildMsg ty [PGGeometry, PGGeography]
parseWithTy ty = rhsParser (PgTypeSimple ty) val
parseWithTy ty = rhsParser (PGTypeScalar ty) val
-- parse one with the column's type
parseOne = parseWithTy colTy
parseOneNoSess ty = rhsParser (PgTypeSimple ty)
parseOneNoSess ty = rhsParser (PGTypeScalar ty)
parseManyWithType ty = rhsParser (PgTypeArray ty) val
parseManyWithType ty = rhsParser (PGTypeArray ty) val
guardType validTys = unless (isScalarColumnWhere (`elem` validTys) colTy) $
throwError $ buildMsg colTy validTys
buildMsg ty expTys = err400 UnexpectedPayload
$ " is of type " <> ty <<> "; this operator works only on columns of type "
<> T.intercalate "/" (map dquote expTys)
parseVal :: (FromJSON a) => m a
parseVal = decodeValue val
buildMsg :: PGColType -> [PGColType] -> QErr
buildMsg ty expTys =
err400 UnexpectedPayload $ mconcat
[ " is of type " <> T.pack (show ty)
, "; this operator works "
, "only on columns of type "
, T.intercalate "/" $ map (T.dquote . T.pack . show) expTys
]
textOnlyOp :: (MonadError QErr m) => PGColType -> m ()
textOnlyOp PGText = return ()
textOnlyOp PGVarchar = return ()
textOnlyOp ty =
throwError $ buildMsg ty [PGVarchar, PGText]
-- We use this convoluted expression instead of col = val
-- in order to handle the case of col : null
equalsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp
@ -299,7 +275,7 @@ notEqualsBoolExpBuilder qualColExp rhsExp =
annBoolExp
:: (QErrM m, CacheRM m)
=> OpRhsParser m v
-> FieldInfoMap
-> FieldInfoMap PGColumnInfo
-> BoolExp
-> m (AnnBoolExp v)
annBoolExp rhsParser fim (BoolExp boolExp) =
@ -308,13 +284,13 @@ annBoolExp rhsParser fim (BoolExp boolExp) =
annColExp
:: (QErrM m, CacheRM m)
=> OpRhsParser m v
-> FieldInfoMap
-> FieldInfoMap PGColumnInfo
-> ColExp
-> m (AnnBoolExpFld v)
annColExp rhsParser colInfoMap (ColExp fieldName colVal) = do
colInfo <- askFieldInfo colInfoMap fieldName
case colInfo of
FIColumn (PGColInfo _ PGJSON _) ->
FIColumn (PGColumnInfo _ (PGColumnScalar PGJSON) _) ->
throwError (err400 UnexpectedPayload "JSON column can not be part of where clause")
FIColumn pgi ->
AVCol pgi <$> parseOperationsExpression rhsParser colInfoMap pgi colVal
@ -337,7 +313,7 @@ convBoolRhs' tq =
convColRhs
:: S.Qual -> AnnBoolExpFldSQL -> State Word64 S.BoolExp
convColRhs tableQual = \case
AVCol (PGColInfo cn _ _) opExps -> do
AVCol (PGColumnInfo cn _ _) opExps -> do
let bExps = map (mkColCompExp tableQual cn) opExps
return $ foldr (S.BEBin S.AndOp) (S.BELit True) bExps
@ -359,18 +335,6 @@ convColRhs tableQual = \case
where
mkQCol q = S.SEQIden . S.QIden q . toIden
pgValParser
:: (MonadError QErr m)
=> PGColType -> Value -> m PGColValue
pgValParser ty =
runAesonParser (parsePGValue ty)
txtRHSBuilder
:: (MonadError QErr m)
=> PGColType -> Value -> m S.SQLExp
txtRHSBuilder ty val =
toTxtValue ty <$> pgValParser ty val
mkColCompExp
:: S.Qual -> PGCol -> OpExpG S.SQLExp -> S.BoolExp
mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol)
@ -432,11 +396,10 @@ mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol)
mkCastsExp casts =
sqlAll . flip map (M.toList casts) $ \(targetType, operations) ->
let targetAnn = pgTypeToAnnType targetType
let targetAnn = S.mkTypeAnn $ PGTypeScalar targetType
in sqlAll $ map (mkCompExp (S.SETyAnn lhs targetAnn)) operations
sqlAll = foldr (S.BEBin S.AndOp) (S.BELit True)
pgTypeToAnnType = S.TypeAnn . T.pack . show
hasStaticExp :: OpExpG PartialSQLExp -> Bool
hasStaticExp = has (template . filtered isStaticValue)

View File

@ -41,6 +41,7 @@ import Hasura.Db as R
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.Types.BoolExp as R
import Hasura.RQL.Types.Column as R
import Hasura.RQL.Types.Common as R
import Hasura.RQL.Types.DML as R
import Hasura.RQL.Types.Error as R
@ -60,9 +61,9 @@ import qualified Network.HTTP.Client as HTTP
getFieldInfoMap
:: QualifiedTable
-> SchemaCache -> Maybe FieldInfoMap
-> SchemaCache -> Maybe (FieldInfoMap PGColumnInfo)
getFieldInfoMap tn =
fmap tiFieldInfoMap . M.lookup tn . scTables
fmap _tiFieldInfoMap . M.lookup tn . scTables
data QCtx
= QCtx
@ -85,7 +86,7 @@ class (Monad m) => UserInfoM m where
askTabInfo
:: (QErrM m, CacheRM m)
=> QualifiedTable -> m TableInfo
=> QualifiedTable -> m (TableInfo PGColumnInfo)
askTabInfo tabName = do
rawSchemaCache <- askSchemaCache
liftMaybe (err400 NotExists errMsg) $ M.lookup tabName $ scTables rawSchemaCache
@ -94,11 +95,11 @@ askTabInfo tabName = do
askTabInfoFromTrigger
:: (QErrM m, CacheRM m)
=> TriggerName -> m TableInfo
=> TriggerName -> m (TableInfo PGColumnInfo)
askTabInfoFromTrigger trn = do
sc <- askSchemaCache
let tabInfos = M.elems $ scTables sc
liftMaybe (err400 NotExists errMsg) $ find (isJust.M.lookup trn.tiEventTriggerInfoMap) tabInfos
liftMaybe (err400 NotExists errMsg) $ find (isJust.M.lookup trn._tiEventTriggerInfoMap) tabInfos
where
errMsg = "event trigger " <> triggerNameToTxt trn <<> " does not exist"
@ -107,7 +108,7 @@ askEventTriggerInfo
=> TriggerName -> m EventTriggerInfo
askEventTriggerInfo trn = do
ti <- askTabInfoFromTrigger trn
let etim = tiEventTriggerInfoMap ti
let etim = _tiEventTriggerInfoMap ti
liftMaybe (err400 NotExists errMsg) $ M.lookup trn etim
where
errMsg = "event trigger " <> triggerNameToTxt trn <<> " does not exist"
@ -164,7 +165,7 @@ liftP1WithQCtx r m =
askFieldInfoMap
:: (QErrM m, CacheRM m)
=> QualifiedTable -> m FieldInfoMap
=> QualifiedTable -> m (FieldInfoMap PGColumnInfo)
askFieldInfoMap tabName = do
mFieldInfoMap <- getFieldInfoMap tabName <$> askSchemaCache
maybe (throw400 NotExists errMsg) return mFieldInfoMap
@ -173,19 +174,19 @@ askFieldInfoMap tabName = do
askPGType
:: (MonadError QErr m)
=> FieldInfoMap
=> FieldInfoMap PGColumnInfo
-> PGCol
-> T.Text
-> m PGColType
-> m PGColumnType
askPGType m c msg =
pgiType <$> askPGColInfo m c msg
askPGColInfo
:: (MonadError QErr m)
=> FieldInfoMap
=> FieldInfoMap columnInfo
-> PGCol
-> T.Text
-> m PGColInfo
-> m columnInfo
askPGColInfo m c msg = do
colInfo <- modifyErr ("column " <>) $
askFieldInfo m (fromPGCol c)
@ -200,16 +201,16 @@ askPGColInfo m c msg = do
]
assertPGCol :: (MonadError QErr m)
=> FieldInfoMap
=> FieldInfoMap columnInfo
-> T.Text
-> PGCol
-> m ()
assertPGCol m msg c = do
_ <- askPGType m c msg
_ <- askPGColInfo m c msg
return ()
askRelType :: (MonadError QErr m)
=> FieldInfoMap
=> FieldInfoMap columnInfo
-> RelName
-> T.Text
-> m RelInfo
@ -226,9 +227,9 @@ askRelType m r msg = do
]
askFieldInfo :: (MonadError QErr m)
=> FieldInfoMap
=> FieldInfoMap columnInfo
-> FieldName
-> m FieldInfo
-> m (FieldInfo columnInfo)
askFieldInfo m f =
case M.lookup f m of
Just colInfo -> return colInfo

View File

@ -20,6 +20,7 @@ module Hasura.RQL.Types.BoolExp
, AnnBoolExpFldSQL
, AnnBoolExpSQL
, PartialSQLExp(..)
, mkTypedSessionVar
, isStaticValue
, AnnBoolExpFldPartialSQL
, AnnBoolExpPartialSQL
@ -30,6 +31,7 @@ module Hasura.RQL.Types.BoolExp
) where
import Hasura.Prelude
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Permission
import qualified Hasura.SQL.DML as S
@ -121,7 +123,7 @@ data DWithinGeogOp a =
} deriving (Show, Eq, Functor, Foldable, Traversable, Data)
$(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeogOp)
type CastExp a = M.HashMap PGColType [OpExpG a]
type CastExp a = M.HashMap PGScalarType [OpExpG a]
data OpExpG a
= ACast !(CastExp a)
@ -236,7 +238,7 @@ opExpToJPair f = \case
opExpsToJSON = object . map (opExpToJPair f)
data AnnBoolExpFld a
= AVCol !PGColInfo ![OpExpG a]
= AVCol !PGColumnInfo ![OpExpG a]
| AVRel !RelInfo !(AnnBoolExp a)
deriving (Show, Eq, Functor, Foldable, Traversable)
@ -280,10 +282,14 @@ type PreSetCols = M.HashMap PGCol S.SQLExp
-- doesn't resolve the session variable
data PartialSQLExp
= PSESessVar !PgType !SessVar
= PSESessVar !(PGType PGScalarType) !SessVar
| PSESQLExp !S.SQLExp
deriving (Show, Eq, Data)
mkTypedSessionVar :: PGType PGColumnType -> SessVar -> PartialSQLExp
mkTypedSessionVar columnType =
PSESessVar (unsafePGColumnToRepresentation <$> columnType)
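For instance (a sketch, with a hypothetical enum reference someEnumRef): the column type is collapsed to its underlying representation before being stored in the boolean expression:
--   mkTypedSessionVar (PGTypeScalar (PGColumnEnumReference someEnumRef)) "x-hasura-role"
--     ~> PSESessVar (PGTypeScalar PGText) "x-hasura-role"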
instance ToJSON PartialSQLExp where
toJSON = \case
PSESessVar colTy sessVar -> toJSON (colTy, sessVar)

View File

@ -1,8 +1,25 @@
module Hasura.RQL.Types.Catalog where
-- | Types that represent the raw data stored in the catalog. See also: the module documentation for
-- "Hasura.RQL.DDL.Schema".
module Hasura.RQL.Types.Catalog
( CatalogMetadata(..)
, CatalogTable(..)
, CatalogTableInfo(..)
, CatalogRelation(..)
, CatalogPermission(..)
, CatalogEventTrigger(..)
, CatalogFunction(..)
) where
import Hasura.Prelude
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.Permission
@ -11,15 +28,21 @@ import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.SchemaCache
import Hasura.SQL.Types
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
data CatalogTableInfo
= CatalogTableInfo
{ _ctiColumns :: ![PGRawColumnInfo]
, _ctiConstraints :: ![ConstraintName]
, _ctiPrimaryKeyColumns :: ![PGCol]
, _ctiViewInfo :: !(Maybe ViewInfo)
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogTableInfo)
data CatalogTable
= CatalogTable
{ _ctTable :: !QualifiedTable
, _ctSystemDefined :: !Bool
, _ctInfo :: !(Maybe TableInfo)
{ _ctName :: !QualifiedTable
, _ctIsSystemDefined :: !Bool
, _ctIsEnum :: !Bool
, _ctInfo :: !(Maybe CatalogTableInfo)
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogTable)

View File

@ -0,0 +1,156 @@
module Hasura.RQL.Types.Column
( PGColumnType(..)
, _PGColumnScalar
, _PGColumnEnumReference
, isScalarColumnWhere
, parsePGScalarValue
, parsePGScalarValues
, unsafePGColumnToRepresentation
, PGColumnInfo(..)
, PGRawColumnInfo(..)
, onlyIntCols
, onlyNumCols
, onlyJSONBCols
, onlyComparableCols
, getColInfos
, EnumReference(..)
, EnumValues
, EnumValue(..)
, EnumValueInfo(..)
) where
import Hasura.Prelude
import qualified Data.HashMap.Strict as M
import qualified Data.Text as T
import Control.Lens.TH
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Error
import Hasura.SQL.Types
import Hasura.SQL.Value
newtype EnumValue
= EnumValue { getEnumValue :: T.Text }
deriving (Show, Eq, Lift, Hashable, ToJSON, ToJSONKey, FromJSON, FromJSONKey)
newtype EnumValueInfo
= EnumValueInfo
{ evComment :: Maybe T.Text
} deriving (Show, Eq, Lift, Hashable)
$(deriveJSON (aesonDrop 2 snakeCase) ''EnumValueInfo)
type EnumValues = M.HashMap EnumValue EnumValueInfo
-- | Represents a reference to an “enum table,” a single-column Postgres table that is referenced
-- via foreign key.
data EnumReference
= EnumReference
{ erTable :: !QualifiedTable
, erValues :: !EnumValues
} deriving (Show, Eq, Generic, Lift)
instance Hashable EnumReference
$(deriveJSON (aesonDrop 2 snakeCase) ''EnumReference)
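A sketch of a concrete value for a hypothetical single-column table public.user_role (all names and comments here are illustrative only; assumes OverloadedStrings and the imports above):
exampleUserRoleEnum :: EnumReference
exampleUserRoleEnum = EnumReference
  { erTable  = QualifiedObject (SchemaName "public") (TableName "user_role")
  , erValues = M.fromList
      [ (EnumValue "admin" , EnumValueInfo (Just "full access"))
      , (EnumValue "editor", EnumValueInfo Nothing)
      ]
  }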
-- | The type we use for columns, which are currently always “scalars” (though see the note about
-- 'PGType'). Unlike 'PGScalarType', which represents a type that /Postgres/ knows about, this type
-- characterizes distinctions we make but Postgres doesn't.
data PGColumnType
-- | Ordinary Postgres columns.
= PGColumnScalar !PGScalarType
-- | Columns that reference enum tables (see "Hasura.RQL.Schema.Enum"). This is not actually a
-- distinct type from the perspective of Postgres (at the time of this writing, we ensure they
-- always have type @text@), but we really want to distinguish this case, since we treat it
-- /completely/ differently in the GraphQL schema.
| PGColumnEnumReference !EnumReference
deriving (Show, Eq, Generic)
instance Hashable PGColumnType
$(deriveToJSON defaultOptions{constructorTagModifier = drop 8} ''PGColumnType)
$(makePrisms ''PGColumnType)
instance DQuote PGColumnType where
dquoteTxt = \case
PGColumnScalar scalar -> dquoteTxt scalar
PGColumnEnumReference (EnumReference tableName _) -> dquoteTxt tableName
isScalarColumnWhere :: (PGScalarType -> Bool) -> PGColumnType -> Bool
isScalarColumnWhere f = \case
PGColumnScalar scalar -> f scalar
PGColumnEnumReference _ -> False
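For example (a sketch):
--   isScalarColumnWhere isGeoType    (PGColumnScalar PGGeometry)       ~> True
--   isScalarColumnWhere (== PGJSONB) (PGColumnScalar PGText)           ~> False
--   isScalarColumnWhere (const True) (PGColumnEnumReference someRef)   ~> False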
-- | Gets the representation type associated with a 'PGColumnType'. Avoid using this if possible.
-- Prefer 'parsePGScalarValue', 'parsePGScalarValues', or
-- 'Hasura.RQL.Types.BoolExp.mkTypedSessionVar'.
unsafePGColumnToRepresentation :: PGColumnType -> PGScalarType
unsafePGColumnToRepresentation = \case
PGColumnScalar scalarType -> scalarType
PGColumnEnumReference _ -> PGText
parsePGScalarValue :: (MonadError QErr m) => PGColumnType -> Value -> m (WithScalarType PGScalarValue)
parsePGScalarValue columnType value = case columnType of
PGColumnScalar scalarType ->
WithScalarType scalarType <$> runAesonParser (parsePGValue scalarType) value
PGColumnEnumReference (EnumReference tableName enumValues) -> do
let typeName = snakeCaseQualObject tableName
flip runAesonParser value . withText (T.unpack typeName) $ \textValue -> do
let enumTextValues = map getEnumValue $ M.keys enumValues
unless (textValue `elem` enumTextValues) $
fail . T.unpack
$ "expected one of the values " <> T.intercalate ", " (map dquote enumTextValues)
<> " for type " <> typeName <<> ", given " <>> textValue
pure $ WithScalarType PGText (PGValText textValue)
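A sketch of the validation this performs for enum-reference columns, reusing the hypothetical user_role example above:
--   parsePGScalarValue (PGColumnEnumReference exampleUserRoleEnum) (String "admin")
--     ~> WithScalarType PGText (PGValText "admin")
--   parsePGScalarValue (PGColumnEnumReference exampleUserRoleEnum) (String "viewer")
--     ~> fails: expected one of the values "admin", "editor" for type <snake-cased table name>, given "viewer"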
parsePGScalarValues
:: (MonadError QErr m)
=> PGColumnType -> [Value] -> m (WithScalarType [PGScalarValue])
parsePGScalarValues columnType values = do
scalarValues <- indexedMapM (fmap pstValue . parsePGScalarValue columnType) values
pure $ WithScalarType (unsafePGColumnToRepresentation columnType) scalarValues
-- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of
-- containing a 'PGColumnType', it only contains a 'PGScalarType', which is combined with the
-- 'prciReferences' field and other table data to eventually resolve the type to a 'PGColumnType'.
data PGRawColumnInfo
= PGRawColumnInfo
{ prciName :: !PGCol
, prciType :: !PGScalarType
, prciIsNullable :: !Bool
, prciReferences :: ![QualifiedTable]
-- ^ only stores single-column references to primary key of foreign tables (used for detecting
-- references to enum tables)
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColumnInfo)
-- | “Resolved” column info, produced from a 'PGRawColumnInfo' value that has been combined with other
-- schema information to produce a 'PGColumnType'.
data PGColumnInfo
= PGColumnInfo
{ pgiName :: !PGCol
, pgiType :: !PGColumnType
, pgiIsNullable :: !Bool
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColumnInfo)
onlyIntCols :: [PGColumnInfo] -> [PGColumnInfo]
onlyIntCols = filter (isScalarColumnWhere isIntegerType . pgiType)
onlyNumCols :: [PGColumnInfo] -> [PGColumnInfo]
onlyNumCols = filter (isScalarColumnWhere isNumType . pgiType)
onlyJSONBCols :: [PGColumnInfo] -> [PGColumnInfo]
onlyJSONBCols = filter (isScalarColumnWhere (== PGJSONB) . pgiType)
onlyComparableCols :: [PGColumnInfo] -> [PGColumnInfo]
onlyComparableCols = filter (isScalarColumnWhere isComparableType . pgiType)
getColInfos :: [PGCol] -> [PGColumnInfo] -> [PGColumnInfo]
getColInfos cols allColInfos =
flip filter allColInfos $ \ci -> pgiName ci `elem` cols

View File

@ -1,6 +1,5 @@
module Hasura.RQL.Types.Common
( PGColInfo(..)
, RelName(..)
( RelName(..)
, relNameToTxt
, RelType(..)
, rootRelName
@ -38,15 +37,6 @@ import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import qualified PostgreSQL.Binary.Decoding as PD
data PGColInfo
= PGColInfo
{ pgiName :: !PGCol
, pgiType :: !PGColType
, pgiIsNullable :: !Bool
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 3 snakeCase) ''PGColInfo)
newtype NonEmptyText = NonEmptyText {unNonEmptyText :: T.Text}
deriving (Show, Eq, Ord, Hashable, ToJSON, ToJSONKey, Lift, Q.ToPrepArg, DQuote)

View File

@ -18,7 +18,8 @@ data MetadataObjType
| MOTEventTrigger
| MOTFunction
| MOTRemoteSchema
deriving (Eq)
deriving (Eq, Generic)
instance Hashable MetadataObjType
instance Show MetadataObjType where
show MOTTable = "table"
@ -36,7 +37,6 @@ data TableMetadataObjId
| MTOPerm !RoleName !PermType
| MTOTrigger !TriggerName
deriving (Show, Eq, Generic)
instance Hashable TableMetadataObjId
data MetadataObjId
@ -45,7 +45,6 @@ data MetadataObjId
| MORemoteSchema !RemoteSchemaName
| MOTableObj !QualifiedTable !TableMetadataObjId
deriving (Show, Eq, Generic)
instance Hashable MetadataObjId
data InconsistentMetadataObj
@ -54,7 +53,8 @@ data InconsistentMetadataObj
, _moType :: !MetadataObjType
, _moDef :: !Value
, _moReason :: !T.Text
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance Hashable InconsistentMetadataObj
instance ToJSON InconsistentMetadataObj where
toJSON (InconsistentMetadataObj _ ty info rsn) =

View File

@ -2,27 +2,36 @@
{-# LANGUAGE RankNTypes #-}
module Hasura.RQL.Types.SchemaCache
( TableCache
, SchemaCache(..)
( SchemaCache(..)
, SchemaCacheVer
, initSchemaCacheVer
, incSchemaCacheVer
, emptySchemaCache
, TableCache
, modTableCache
, addTableToCache
, modTableInCache
, delTableFromCache
, TableInfo(..)
, tiName
, tiSystemDefined
, tiFieldInfoMap
, tiRolePermInfoMap
, tiUniqOrPrimConstraints
, tiPrimaryKeyCols
, tiViewInfo
, tiEventTriggerInfoMap
, tiEnumValues
, TableConstraint(..)
, ConstraintType(..)
, ViewInfo(..)
, isMutable
, mutableView
, onlyIntCols
, onlyNumCols
, onlyJSONBCols
, onlyComparableCols
, isUniqueOrPrimary
, isForeignKey
, addTableToCache
, modTableInCache
, delTableFromCache
, RemoteSchemaCtx(..)
, RemoteSchemaMap
@ -36,17 +45,16 @@ module Hasura.RQL.Types.SchemaCache
, FieldInfoMap
, FieldInfo(..)
, _FIColumn
, _FIRelationship
, fieldInfoToEither
, partitionFieldInfos
, partitionFieldInfosWith
, getCols
, getRels
, PGColInfo(..)
, isPGColInfo
, getColInfos
, RelInfo(..)
-- , addFldToCache
, addColToCache
, addRelToCache
@ -107,6 +115,7 @@ import qualified Hasura.GraphQL.Context as GC
import Hasura.Prelude
import Hasura.RQL.Types.BoolExp
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.EventTrigger
@ -137,58 +146,42 @@ mkColDep :: DependencyReason -> QualifiedTable -> PGCol -> SchemaDependency
mkColDep reason tn col =
flip SchemaDependency reason . SOTableObj tn $ TOCol col
onlyIntCols :: [PGColInfo] -> [PGColInfo]
onlyIntCols = filter (isIntegerType . pgiType)
onlyNumCols :: [PGColInfo] -> [PGColInfo]
onlyNumCols = filter (isNumType . pgiType)
onlyJSONBCols :: [PGColInfo] -> [PGColInfo]
onlyJSONBCols = filter (isJSONBType . pgiType)
onlyComparableCols :: [PGColInfo] -> [PGColInfo]
onlyComparableCols = filter (isComparableType . pgiType)
getColInfos :: [PGCol] -> [PGColInfo] -> [PGColInfo]
getColInfos cols allColInfos =
flip filter allColInfos $ \ci -> pgiName ci `elem` cols
type WithDeps a = (a, [SchemaDependency])
data FieldInfo
= FIColumn !PGColInfo
data FieldInfo columnInfo
= FIColumn !columnInfo
| FIRelationship !RelInfo
deriving (Show, Eq)
$(deriveToJSON
defaultOptions { constructorTagModifier = snakeCase . drop 2
, sumEncoding = TaggedObject "type" "detail"
}
''FieldInfo)
$(makePrisms ''FieldInfo)
fieldInfoToEither :: FieldInfo -> Either PGColInfo RelInfo
fieldInfoToEither :: FieldInfo columnInfo -> Either columnInfo RelInfo
fieldInfoToEither (FIColumn l) = Left l
fieldInfoToEither (FIRelationship r) = Right r
partitionFieldInfos :: [FieldInfo] -> ([PGColInfo], [RelInfo])
partitionFieldInfos :: [FieldInfo columnInfo] -> ([columnInfo], [RelInfo])
partitionFieldInfos = partitionFieldInfosWith (id, id)
partitionFieldInfosWith :: (PGColInfo -> a, RelInfo -> b)
-> [FieldInfo] -> ([a], [b])
partitionFieldInfosWith :: (columnInfo -> a, RelInfo -> b)
-> [FieldInfo columnInfo] -> ([a], [b])
partitionFieldInfosWith fns =
partitionEithers . map (biMapEither fns . fieldInfoToEither)
where
biMapEither (f1, f2) = either (Left . f1) (Right . f2)
type FieldInfoMap = M.HashMap FieldName FieldInfo
type FieldInfoMap columnInfo = M.HashMap FieldName (FieldInfo columnInfo)
getCols :: FieldInfoMap -> [PGColInfo]
getCols :: FieldInfoMap columnInfo -> [columnInfo]
getCols fim = lefts $ map fieldInfoToEither $ M.elems fim
getRels :: FieldInfoMap -> [RelInfo]
getRels :: FieldInfoMap columnInfo -> [RelInfo]
getRels fim = rights $ map fieldInfoToEither $ M.elems fim
isPGColInfo :: FieldInfo -> Bool
isPGColInfo :: FieldInfo columnInfo -> Bool
isPGColInfo (FIColumn _) = True
isPGColInfo _ = False
@ -331,32 +324,20 @@ mutableView qt f mVI operation =
unless (isMutable f mVI) $ throw400 NotSupported $
"view " <> qt <<> " is not " <> operation
data TableInfo
data TableInfo columnInfo
= TableInfo
{ tiName :: !QualifiedTable
, tiSystemDefined :: !Bool
, tiFieldInfoMap :: !FieldInfoMap
, tiRolePermInfoMap :: !RolePermInfoMap
, tiUniqOrPrimConstraints :: ![ConstraintName]
, tiPrimaryKeyCols :: ![PGCol]
, tiViewInfo :: !(Maybe ViewInfo)
, tiEventTriggerInfoMap :: !EventTriggerInfoMap
{ _tiName :: !QualifiedTable
, _tiSystemDefined :: !Bool
, _tiFieldInfoMap :: !(FieldInfoMap columnInfo)
, _tiRolePermInfoMap :: !RolePermInfoMap
, _tiUniqOrPrimConstraints :: ![ConstraintName]
, _tiPrimaryKeyCols :: ![PGCol]
, _tiViewInfo :: !(Maybe ViewInfo)
, _tiEventTriggerInfoMap :: !EventTriggerInfoMap
, _tiEnumValues :: !(Maybe EnumValues)
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 2 snakeCase) ''TableInfo)
instance FromJSON TableInfo where
parseJSON = withObject "TableInfo" $ \o -> do
name <- o .: "name"
columns <- o .: "columns"
pkeyCols <- o .: "primary_key_columns"
constraints <- o .: "constraints"
viewInfoM <- o .:? "view_info"
isSystemDefined <- o .:? "is_system_defined" .!= False
let colMap = M.fromList $ flip map columns $
\c -> (fromPGCol $ pgiName c, FIColumn c)
return $ TableInfo name isSystemDefined colMap mempty
constraints pkeyCols viewInfoM mempty
$(makeLenses ''TableInfo)
data FunctionType
= FTVOLATILE
@ -381,8 +362,8 @@ newtype FunctionArgName =
data FunctionArg
= FunctionArg
{ faName :: !(Maybe FunctionArgName)
, faType :: !PGColType
} deriving(Show, Eq)
, faType :: !PGScalarType
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionArg)
@ -398,7 +379,7 @@ data FunctionInfo
$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionInfo)
type TableCache = M.HashMap QualifiedTable TableInfo -- info of all tables
type TableCache columnInfo = M.HashMap QualifiedTable (TableInfo columnInfo) -- info of all tables
type FunctionCache = M.HashMap QualifiedFunction FunctionInfo -- info of all functions
data RemoteSchemaCtx
@ -443,7 +424,7 @@ incSchemaCacheVer (SchemaCacheVer prev) =
data SchemaCache
= SchemaCache
{ scTables :: !TableCache
{ scTables :: !(TableCache PGColumnInfo)
, scFunctions :: !FunctionCache
, scRemoteSchemas :: !RemoteSchemaMap
, scAllowlist :: !(HS.HashSet GQLQuery)
@ -466,16 +447,12 @@ modDepMapInCache f = do
writeSchemaCache $ sc { scDepMap = f (scDepMap sc)}
class (Monad m) => CacheRM m where
-- Get the schema cache
askSchemaCache :: m SchemaCache
instance (Monad m) => CacheRM (StateT SchemaCache m) where
askSchemaCache = get
class (CacheRM m) => CacheRWM m where
-- Get the schema cache
writeSchemaCache :: SchemaCache -> m ()
instance (Monad m) => CacheRWM (StateT SchemaCache m) where
@ -486,19 +463,19 @@ emptySchemaCache =
SchemaCache M.empty M.empty M.empty
HS.empty M.empty GC.emptyGCtx mempty []
modTableCache :: (CacheRWM m) => TableCache -> m ()
modTableCache :: (CacheRWM m) => TableCache PGColumnInfo -> m ()
modTableCache tc = do
sc <- askSchemaCache
writeSchemaCache $ sc { scTables = tc }
addTableToCache :: (QErrM m, CacheRWM m)
=> TableInfo -> m ()
=> TableInfo PGColumnInfo -> m ()
addTableToCache ti = do
sc <- askSchemaCache
assertTableNotExists tn sc
modTableCache $ M.insert tn ti $ scTables sc
where
tn = tiName ti
tn = _tiName ti
delTableFromCache :: (QErrM m, CacheRWM m)
=> QualifiedTable -> m ()
@ -514,7 +491,7 @@ delTableFromCache tn = do
getTableInfoFromCache :: (QErrM m)
=> QualifiedTable
-> SchemaCache
-> m TableInfo
-> m (TableInfo PGColumnInfo)
getTableInfoFromCache tn sc =
case M.lookup tn (scTables sc) of
Nothing -> throw500 $ "table not found in cache : " <>> tn
@ -530,7 +507,7 @@ assertTableNotExists tn sc =
Just _ -> throw500 $ "table exists in cache : " <>> tn
modTableInCache :: (QErrM m, CacheRWM m)
=> (TableInfo -> m TableInfo)
=> (TableInfo PGColumnInfo -> m (TableInfo PGColumnInfo))
-> QualifiedTable
-> m ()
modTableInCache f tn = do
@ -541,7 +518,7 @@ modTableInCache f tn = do
addColToCache
:: (QErrM m, CacheRWM m)
=> PGCol -> PGColInfo
=> PGCol -> PGColumnInfo
-> QualifiedTable -> m ()
addColToCache cn ci =
addFldToCache (fromPGCol cn) (FIColumn ci)
@ -558,17 +535,17 @@ addRelToCache rn ri deps tn = do
addFldToCache
:: (QErrM m, CacheRWM m)
=> FieldName -> FieldInfo
=> FieldName -> FieldInfo PGColumnInfo
-> QualifiedTable -> m ()
addFldToCache fn fi =
modTableInCache modFieldInfoMap
where
modFieldInfoMap ti = do
let fim = tiFieldInfoMap ti
let fim = _tiFieldInfoMap ti
case M.lookup fn fim of
Just _ -> throw500 "field already exists "
Nothing -> return $
ti { tiFieldInfoMap = M.insert fn fi fim }
ti { _tiFieldInfoMap = M.insert fn fi fim }
delFldFromCache :: (QErrM m, CacheRWM m)
=> FieldName -> QualifiedTable -> m ()
@ -576,10 +553,10 @@ delFldFromCache fn =
modTableInCache modFieldInfoMap
where
modFieldInfoMap ti = do
let fim = tiFieldInfoMap ti
let fim = _tiFieldInfoMap ti
case M.lookup fn fim of
Just _ -> return $
ti { tiFieldInfoMap = M.delete fn fim }
ti { _tiFieldInfoMap = M.delete fn fim }
Nothing -> throw500 "field does not exist"
delColFromCache :: (QErrM m, CacheRWM m)
@ -597,7 +574,7 @@ delRelFromCache rn tn = do
updColInCache
:: (QErrM m, CacheRWM m)
=> PGCol -> PGColInfo
=> PGCol -> PGColumnInfo
-> QualifiedTable -> m ()
updColInCache cn ci tn = do
delColFromCache cn tn
@ -639,8 +616,8 @@ addEventTriggerToCache qt eti deps = do
where
trn = etiName eti
modEventTriggerInfo ti = do
let etim = tiEventTriggerInfoMap ti
return $ ti { tiEventTriggerInfoMap = M.insert trn eti etim}
let etim = _tiEventTriggerInfoMap ti
return $ ti { _tiEventTriggerInfoMap = M.insert trn eti etim}
schObjId = SOTableObj qt $ TOTrigger trn
delEventTriggerFromCache
@ -653,8 +630,8 @@ delEventTriggerFromCache qt trn = do
modDepMapInCache (removeFromDepMap schObjId)
where
modEventTriggerInfo ti = do
let etim = tiEventTriggerInfoMap ti
return $ ti { tiEventTriggerInfoMap = M.delete trn etim }
let etim = _tiEventTriggerInfoMap ti
return $ ti { _tiEventTriggerInfoMap = M.delete trn etim }
schObjId = SOTableObj qt $ TOTrigger trn
addFunctionToCache
@ -713,11 +690,11 @@ addPermToCache tn rn pa i deps = do
where
paL = permAccToLens pa
modRolePermInfo ti = do
let rpim = tiRolePermInfoMap ti
let rpim = _tiRolePermInfoMap ti
rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim
newRPI = rpi & paL ?~ i
assertPermNotExists pa rpi
return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim }
return $ ti { _tiRolePermInfoMap = M.insert rn newRPI rpim }
schObjId = SOTableObj tn $ TOPerm rn $ permAccToType pa
assertPermNotExists
@ -746,11 +723,11 @@ delPermFromCache pa rn tn = do
where
paL = permAccToLens pa
modRolePermInfo ti = do
let rpim = tiRolePermInfoMap ti
let rpim = _tiRolePermInfoMap ti
rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim
assertPermExists pa rpi
let newRPI = rpi & paL .~ Nothing
return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim }
return $ ti { _tiRolePermInfoMap = M.insert rn newRPI rpim }
schObjId = SOTableObj tn $ TOPerm rn $ permAccToType pa
addRemoteSchemaToCache

View File

@ -225,23 +225,23 @@ newtype TypeAnn
= TypeAnn {unTypeAnn :: T.Text}
deriving (Show, Eq, Data)
mkTypeAnn :: PgType -> TypeAnn
mkTypeAnn = TypeAnn . T.pack . show
mkTypeAnn :: PGType PGScalarType -> TypeAnn
mkTypeAnn = TypeAnn . toSQLTxt
intTypeAnn :: TypeAnn
intTypeAnn = mkTypeAnn $ PgTypeSimple PGInteger
intTypeAnn = mkTypeAnn $ PGTypeScalar PGInteger
textTypeAnn :: TypeAnn
textTypeAnn = mkTypeAnn $ PgTypeSimple PGText
textTypeAnn = mkTypeAnn $ PGTypeScalar PGText
textArrTypeAnn :: TypeAnn
textArrTypeAnn = mkTypeAnn $ PgTypeArray PGText
textArrTypeAnn = mkTypeAnn $ PGTypeArray PGText
jsonTypeAnn :: TypeAnn
jsonTypeAnn = mkTypeAnn $ PgTypeSimple PGJSON
jsonTypeAnn = mkTypeAnn $ PGTypeScalar PGJSON
jsonbTypeAnn :: TypeAnn
jsonbTypeAnn = mkTypeAnn $ PgTypeSimple PGJSONB
jsonbTypeAnn = mkTypeAnn $ PGTypeScalar PGJSONB
data CountType
= CTStar
@ -266,6 +266,7 @@ instance ToSQL TupleExp where
data SQLExp
= SEPrep !Int
| SENull
| SELit !T.Text
| SEUnsafe !T.Text
| SESelect !Select
@ -285,8 +286,8 @@ data SQLExp
| SECount !CountType
deriving (Show, Eq, Data)
withTyAnn :: PGColType -> SQLExp -> SQLExp
withTyAnn colTy v = SETyAnn v $ TypeAnn $ T.pack $ show colTy
withTyAnn :: PGScalarType -> SQLExp -> SQLExp
withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeScalar colTy
instance J.ToJSON SQLExp where
toJSON = J.toJSON . toSQLTxt
@ -310,6 +311,8 @@ countStar = SECount CTStar
instance ToSQL SQLExp where
toSQL (SEPrep argNumber) =
TB.char '$' <> fromString (show argNumber)
toSQL SENull =
TB.text "null"
toSQL (SELit tv) =
TB.text $ pgFmtLit tv
toSQL (SEUnsafe t) =

View File

@ -0,0 +1,97 @@
-- | Functions and datatypes for interpreting Postgres errors.
module Hasura.SQL.Error
( PGErrorType(..)
, _PGDataException
, _PGIntegrityConstraintViolation
, _PGSyntaxErrorOrAccessRuleViolation
, pgErrorType
, PGErrorCode(..)
, _PGErrorGeneric
, _PGErrorSpecific
, PGDataException(..)
, PGIntegrityConstraintViolation(..)
, PGSyntaxErrorOrAccessRuleViolation(..)
) where
import Hasura.Prelude
import Control.Lens.TH (makePrisms)
import qualified Data.Text as T
import qualified Database.PG.Query.Connection as Q
-- | The top-level error code type. Errors in Postgres are divided into different /classes/, which
-- are further subdivided into individual error codes. Even if a particular status code is not known
-- to the application, it's possible to determine its class and handle it appropriately.
data PGErrorType
= PGDataException !(Maybe (PGErrorCode PGDataException))
| PGIntegrityConstraintViolation !(Maybe (PGErrorCode PGIntegrityConstraintViolation))
| PGSyntaxErrorOrAccessRuleViolation !(Maybe (PGErrorCode PGSyntaxErrorOrAccessRuleViolation))
deriving (Show, Eq)
data PGErrorCode a
= PGErrorGeneric
-- ^ represents errors that have the non-specific @000@ status code
| PGErrorSpecific !a
-- ^ represents errors with a known, more specific status code
deriving (Show, Eq, Functor)
data PGDataException
= PGInvalidDatetimeFormat
| PGInvalidParameterValue
| PGInvalidEscapeSequence
| PGInvalidTextRepresentation
deriving (Show, Eq)
data PGIntegrityConstraintViolation
= PGRestrictViolation
| PGNotNullViolation
| PGForeignKeyViolation
| PGUniqueViolation
| PGCheckViolation
| PGExclusionViolation
deriving (Show, Eq)
data PGSyntaxErrorOrAccessRuleViolation
= PGUndefinedObject
| PGInvalidColumnReference
deriving (Show, Eq)
$(makePrisms ''PGErrorType)
$(makePrisms ''PGErrorCode)
pgErrorType :: Q.PGStmtErrDetail -> Maybe PGErrorType
pgErrorType errorDetails = parseTypes =<< Q.edStatusCode errorDetails
where
parseTypes fullCodeText = choice
[ withClass "22" PGDataException
[ code "007" PGInvalidDatetimeFormat
, code "023" PGInvalidParameterValue
, code "025" PGInvalidEscapeSequence
, code "P02" PGInvalidTextRepresentation
]
, withClass "23" PGIntegrityConstraintViolation
[ code "001" PGRestrictViolation
, code "502" PGNotNullViolation
, code "503" PGForeignKeyViolation
, code "505" PGUniqueViolation
, code "514" PGCheckViolation
, code "P01" PGExclusionViolation
]
, withClass "42" PGSyntaxErrorOrAccessRuleViolation
[ code "704" PGUndefinedObject
, code "P10" PGInvalidColumnReference
]
]
where
(classText, codeText) = T.splitAt 2 fullCodeText
withClass :: T.Text -> (Maybe a -> b) -> [Maybe a] -> Maybe b
withClass expectedClassText mkClass codes =
guard (classText == expectedClassText) $> mkClass (choice codes)
code :: T.Text -> a -> Maybe (PGErrorCode a)
code expectedCodeText codeValue =
guard (codeText == expectedCodeText) $> PGErrorSpecific codeValue
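-- Illustrative usage only (not part of this commit): because the error class is
-- recoverable even when the specific code is unknown, callers can pattern-match as
-- deeply as they need. For example, detecting a unique-key violation:
isUniqueViolation :: Q.PGStmtErrDetail -> Bool
isUniqueViolation errorDetails = case pgErrorType errorDetails of
  Just (PGIntegrityConstraintViolation (Just (PGErrorSpecific PGUniqueViolation))) -> True
  _ -> False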

View File

@ -146,6 +146,7 @@ uOrderBy (S.OrderByExp ordByItems) =
uSqlExp :: S.SQLExp -> Uniq S.SQLExp
uSqlExp = restoringIdens . \case
S.SEPrep i -> return $ S.SEPrep i
S.SENull -> return S.SENull
S.SELit t -> return $ S.SELit t
S.SEUnsafe t -> return $ S.SEUnsafe t
S.SESelect s -> S.SESelect <$> uSelect s

View File

@ -7,8 +7,8 @@ import Hasura.Prelude
import Data.Aeson
import Data.Aeson.Encoding (text)
import Data.Aeson.TH
import Data.Aeson.Types (toJSONKeyText)
import Data.String (fromString)
import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
@ -52,6 +52,21 @@ class DQuote a where
instance DQuote T.Text where
dquoteTxt = id
{-# INLINE dquoteTxt #-}
dquote :: (DQuote a) => a -> T.Text
dquote = T.dquote . dquoteTxt
{-# INLINE dquote #-}
infixr 6 <>>
(<>>) :: (DQuote a) => T.Text -> a -> T.Text
(<>>) lTxt a = lTxt <> dquote a
{-# INLINE (<>>) #-}
infixr 6 <<>
(<<>) :: (DQuote a) => a -> T.Text -> T.Text
(<<>) a rTxt = dquote a <> rTxt
{-# INLINE (<<>) #-}
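-- Illustrative only (not part of this commit): the doubled bracket sits next to
-- the argument that gets double-quoted, e.g.
--
--   "column " <>> columnName           -- renders as: column "id"
--   tableName <<> " is not tracked"    -- renders as: "author" is not tracked
--
-- where columnName and tableName are any DQuote instances (here, plain Text).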
pgFmtIden :: T.Text -> T.Text
pgFmtIden x =
@ -69,18 +84,6 @@ pgFmtLit x =
trimNullChars :: T.Text -> T.Text
trimNullChars = T.takeWhile (/= '\x0')
infixr 6 <>>
(<>>) :: (DQuote a) => T.Text -> a -> T.Text
(<>>) lTxt a =
lTxt <> T.dquote (dquoteTxt a)
{-# INLINE (<>>) #-}
infixr 6 <<>
(<<>) :: (DQuote a) => a -> T.Text -> T.Text
(<<>) a rTxt =
T.dquote (dquoteTxt a) <> rTxt
{-# INLINE (<<>) #-}
instance (ToSQL a) => ToSQL (Maybe a) where
toSQL (Just a) = toSQL a
toSQL Nothing = mempty
@ -244,7 +247,7 @@ showPGCols :: (Foldable t) => t PGCol -> T.Text
showPGCols cols =
T.intercalate ", " $ map (T.dquote . getPGColTxt) $ toList cols
data PGColType
data PGScalarType
= PGSmallInt
| PGInteger
| PGBigInt
@ -265,45 +268,43 @@ data PGColType
| PGGeometry
| PGGeography
| PGUnknown !T.Text
deriving (Eq, Lift, Generic, Data)
deriving (Show, Eq, Lift, Generic, Data)
instance Hashable PGColType
instance Hashable PGScalarType
instance Show PGColType where
show PGSmallInt = "smallint"
show PGInteger = "integer"
show PGBigInt = "bigint"
show PGSerial = "serial"
show PGBigSerial = "bigserial"
show PGFloat = "real"
show PGDouble = "float8"
show PGNumeric = "numeric"
show PGBoolean = "boolean"
show PGChar = "character"
show PGVarchar = "varchar"
show PGText = "text"
show PGDate = "date"
show PGTimeStampTZ = "timestamptz"
show PGTimeTZ = "timetz"
show PGJSON = "json"
show PGJSONB = "jsonb"
show PGGeometry = "geometry"
show PGGeography = "geography"
show (PGUnknown t) = T.unpack t
instance ToSQL PGScalarType where
toSQL = \case
PGSmallInt -> "smallint"
PGInteger -> "integer"
PGBigInt -> "bigint"
PGSerial -> "serial"
PGBigSerial -> "bigserial"
PGFloat -> "real"
PGDouble -> "float8"
PGNumeric -> "numeric"
PGBoolean -> "boolean"
PGChar -> "character"
PGVarchar -> "varchar"
PGText -> "text"
PGDate -> "date"
PGTimeStampTZ -> "timestamptz"
PGTimeTZ -> "timetz"
PGJSON -> "json"
PGJSONB -> "jsonb"
PGGeometry -> "geometry"
PGGeography -> "geography"
PGUnknown t -> TB.text t
instance ToJSON PGColType where
toJSON pct = String $ T.pack $ show pct
instance ToJSON PGScalarType where
toJSON = String . toSQLTxt
instance ToJSONKey PGColType where
toJSONKey = toJSONKeyText (T.pack . show)
instance ToJSONKey PGScalarType where
toJSONKey = toJSONKeyText toSQLTxt
instance ToSQL PGColType where
toSQL pct = fromString $ show pct
instance DQuote PGScalarType where
dquoteTxt = toSQLTxt
instance DQuote PGColType where
dquoteTxt = T.pack . show
txtToPgColTy :: Text -> PGColType
txtToPgColTy :: Text -> PGScalarType
txtToPgColTy t = case t of
"serial" -> PGSerial
"bigserial" -> PGBigSerial
@ -353,11 +354,11 @@ txtToPgColTy t = case t of
_ -> PGUnknown t
instance FromJSON PGColType where
instance FromJSON PGScalarType where
parseJSON (String t) = return $ txtToPgColTy t
parseJSON _ = fail "Expecting a string for PGColType"
parseJSON _ = fail "Expecting a string for PGScalarType"
pgTypeOid :: PGColType -> PQ.Oid
pgTypeOid :: PGScalarType -> PQ.Oid
pgTypeOid PGSmallInt = PTI.int2
pgTypeOid PGInteger = PTI.int4
pgTypeOid PGBigInt = PTI.int8
@ -380,43 +381,29 @@ pgTypeOid PGGeometry = PTI.text
pgTypeOid PGGeography = PTI.text
pgTypeOid (PGUnknown _) = PTI.auto
-- TODO: This is incorrect modelling as PGColType
-- will capture anything under PGUnknown
-- This should be fixed when support for
-- all types is merged.
data PgType
= PgTypeSimple !PGColType
| PgTypeArray !PGColType
deriving (Eq, Data)
instance Show PgType where
show = \case
PgTypeSimple ty -> show ty
-- typename array is an sql standard way
-- of declaring types
PgTypeArray ty -> show ty <> " array"
instance ToJSON PgType where
toJSON = toJSON . show
isIntegerType :: PGColType -> Bool
isIntegerType :: PGScalarType -> Bool
isIntegerType PGInteger = True
isIntegerType PGSmallInt = True
isIntegerType PGBigInt = True
isIntegerType _ = False
isNumType :: PGColType -> Bool
isNumType :: PGScalarType -> Bool
isNumType PGFloat = True
isNumType PGDouble = True
isNumType PGNumeric = True
isNumType ty = isIntegerType ty
isJSONBType :: PGColType -> Bool
isJSONBType PGJSONB = True
isJSONBType _ = False
stringTypes :: [PGScalarType]
stringTypes = [PGVarchar, PGText]
isStringType :: PGScalarType -> Bool
isStringType = (`elem` stringTypes)
isComparableType :: PGColType -> Bool
jsonTypes :: [PGScalarType]
jsonTypes = [PGJSON, PGJSONB]
isJSONType :: PGScalarType -> Bool
isJSONType = (`elem` jsonTypes)
isComparableType :: PGScalarType -> Bool
isComparableType PGJSON = False
isComparableType PGJSONB = False
isComparableType PGGeometry = False
@ -425,7 +412,7 @@ isComparableType PGBoolean = False
isComparableType (PGUnknown _) = False
isComparableType _ = True
isBigNum :: PGColType -> Bool
isBigNum :: PGScalarType -> Bool
isBigNum = \case
PGBigInt -> True
PGBigSerial -> True
@ -433,8 +420,33 @@ isBigNum = \case
PGDouble -> True
_ -> False
isGeoType :: PGColType -> Bool
isGeoType = \case
PGGeometry -> True
PGGeography -> True
_ -> False
geoTypes :: [PGScalarType]
geoTypes = [PGGeometry, PGGeography]
isGeoType :: PGScalarType -> Bool
isGeoType = (`elem` geoTypes)
data WithScalarType a
= WithScalarType
{ pstType :: !PGScalarType
, pstValue :: !a
} deriving (Show, Eq, Functor, Foldable, Traversable)
-- | The type of all Postgres types (i.e. scalars and arrays). This type is parameterized so that
-- we can have both @'PGType' 'PGScalarType'@ and @'PGType' 'Hasura.RQL.Types.PGColumnType'@, for
-- when we care about the distinction made by 'Hasura.RQL.Types.PGColumnType'. If we ever change
-- 'Hasura.RQL.Types.PGColumnType' to handle arrays, not just scalars, then the parameterization can
-- go away.
--
-- TODO: This is incorrect modeling, as 'PGScalarType' will capture anything (under 'PGUnknown').
-- This should be fixed when support for all types is merged.
data PGType a
= PGTypeScalar !a
| PGTypeArray !a
deriving (Show, Eq, Data, Functor)
$(deriveJSON defaultOptions{constructorTagModifier = drop 6} ''PGType)
instance (ToSQL a) => ToSQL (PGType a) where
toSQL = \case
PGTypeScalar ty -> toSQL ty
-- typename array is an sql standard way of declaring types
PGTypeArray ty -> toSQL ty <> " array"

View File

@ -1,4 +1,18 @@
module Hasura.SQL.Value where
module Hasura.SQL.Value
( PGScalarValue(..)
, pgColValueToInt
, withGeoVal
, parsePGValue
, TxtEncodedPGVal
, txtEncodedPGVal
, binEncoder
, txtEncoder
, toBinaryValue
, toTxtValue
, toPrepParam
) where
import Hasura.SQL.GeoJSON
import Hasura.SQL.Time
@ -9,7 +23,6 @@ import qualified Database.PG.Query.PTI as PTI
import qualified Hasura.SQL.DML as S
import Data.Aeson
import Data.Aeson.Internal
import Data.Int
import Data.Scientific
import Data.Time
@ -25,7 +38,7 @@ import qualified Database.PostgreSQL.LibPQ as PQ
import qualified PostgreSQL.Binary.Encoding as PE
-- Binary value. Used in prepared SQL statements
data PGColValue
data PGScalarValue
= PGValInteger !Int32
| PGValSmallInt !Int16
| PGValBigInt !Int64
@ -39,13 +52,53 @@ data PGColValue
| PGValDate !Day
| PGValTimeStampTZ !UTCTime
| PGValTimeTZ !ZonedTimeOfDay
| PGNull !PGColType
| PGNull !PGScalarType
| PGValJSON !Q.JSON
| PGValJSONB !Q.JSONB
| PGValGeo !GeometryWithCRS
| PGValUnknown !T.Text
deriving (Show, Eq)
pgColValueToInt :: PGScalarValue -> Maybe Int
pgColValueToInt (PGValInteger i) = Just $ fromIntegral i
pgColValueToInt (PGValSmallInt i) = Just $ fromIntegral i
pgColValueToInt (PGValBigInt i) = Just $ fromIntegral i
pgColValueToInt _ = Nothing
withGeoVal :: PGScalarType -> S.SQLExp -> S.SQLExp
withGeoVal ty v
| isGeoType ty = S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing
| otherwise = v
parsePGValue :: PGScalarType -> Value -> AT.Parser PGScalarValue
parsePGValue ty val = case (ty, val) of
(_ , Null) -> pure $ PGNull ty
(PGUnknown _, String t) -> pure $ PGValUnknown t
(_ , String t) -> parseTyped <|> pure (PGValUnknown t)
(_ , _) -> parseTyped
where
parseTyped = case ty of
PGSmallInt -> PGValSmallInt <$> parseJSON val
PGInteger -> PGValInteger <$> parseJSON val
PGBigInt -> PGValBigInt <$> parseJSON val
PGSerial -> PGValInteger <$> parseJSON val
PGBigSerial -> PGValBigInt <$> parseJSON val
PGFloat -> PGValFloat <$> parseJSON val
PGDouble -> PGValDouble <$> parseJSON val
PGNumeric -> PGValNumeric <$> parseJSON val
PGBoolean -> PGValBoolean <$> parseJSON val
PGChar -> PGValChar <$> parseJSON val
PGVarchar -> PGValVarchar <$> parseJSON val
PGText -> PGValText <$> parseJSON val
PGDate -> PGValDate <$> parseJSON val
PGTimeStampTZ -> PGValTimeStampTZ <$> parseJSON val
PGTimeTZ -> PGValTimeTZ <$> parseJSON val
PGJSON -> PGValJSON . Q.JSON <$> parseJSON val
PGJSONB -> PGValJSONB . Q.JSONB <$> parseJSON val
PGGeometry -> PGValGeo <$> parseJSON val
PGGeography -> PGValGeo <$> parseJSON val
PGUnknown tyName -> fail $ "A string is expected for type : " ++ T.unpack tyName
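-- Illustrative only (hypothetical name, not part of this commit): exercising the
-- parser through aeson's 'AT.parseEither' (already imported qualified here) shows
-- how values are coerced per column type; strings that fail to parse at the
-- declared type fall back to 'PGValUnknown'.
parsePGValueExamples :: [Either String PGScalarValue]
parsePGValueExamples =
  [ AT.parseEither (parsePGValue PGInteger) (Number 42)    -- Right (PGValInteger 42)
  , AT.parseEither (parsePGValue PGBoolean) Null           -- Right (PGNull PGBoolean)
  , AT.parseEither (parsePGValue PGInteger) (String "hi")  -- Right (PGValUnknown "hi")
  ]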
data TxtEncodedPGVal
= TENull
| TELit !Text
@ -58,7 +111,7 @@ instance ToJSON TxtEncodedPGVal where
TENull -> Null
TELit t -> String t
txtEncodedPGVal :: PGColValue -> TxtEncodedPGVal
txtEncodedPGVal :: PGScalarValue -> TxtEncodedPGVal
txtEncodedPGVal colVal = case colVal of
PGValInteger i -> TELit $ T.pack $ show i
PGValSmallInt i -> TELit $ T.pack $ show i
@ -85,154 +138,37 @@ txtEncodedPGVal colVal = case colVal of
AE.encodeToLazyText o
PGValUnknown t -> TELit t
txtEncoder :: PGColValue -> S.SQLExp
binEncoder :: PGScalarValue -> Q.PrepArg
binEncoder colVal = case colVal of
PGValInteger i -> Q.toPrepVal i
PGValSmallInt i -> Q.toPrepVal i
PGValBigInt i -> Q.toPrepVal i
PGValFloat f -> Q.toPrepVal f
PGValDouble d -> Q.toPrepVal d
PGValNumeric sc -> Q.toPrepVal sc
PGValBoolean b -> Q.toPrepVal b
PGValChar t -> Q.toPrepVal t
PGValVarchar t -> Q.toPrepVal t
PGValText t -> Q.toPrepVal t
PGValDate d -> Q.toPrepVal d
PGValTimeStampTZ u -> Q.toPrepVal u
PGValTimeTZ (ZonedTimeOfDay t z) -> Q.toPrepValHelper PTI.timetz PE.timetz_int (t, z)
PGNull ty -> (pgTypeOid ty, Nothing)
PGValJSON u -> Q.toPrepVal u
PGValJSONB u -> Q.toPrepVal u
PGValGeo o -> Q.toPrepVal $ TL.toStrict $ AE.encodeToLazyText o
PGValUnknown t -> (PTI.auto, Just (TE.encodeUtf8 t, PQ.Text))
txtEncoder :: PGScalarValue -> S.SQLExp
txtEncoder colVal = case txtEncodedPGVal colVal of
TENull -> S.SEUnsafe "NULL"
TELit t -> S.SELit t
binEncoder :: PGColValue -> Q.PrepArg
binEncoder colVal = case colVal of
PGValInteger i ->
Q.toPrepVal i
PGValSmallInt i ->
Q.toPrepVal i
PGValBigInt i ->
Q.toPrepVal i
PGValFloat f ->
Q.toPrepVal f
PGValDouble d ->
Q.toPrepVal d
PGValNumeric sc ->
Q.toPrepVal sc
PGValBoolean b ->
Q.toPrepVal b
PGValChar t ->
Q.toPrepVal t
PGValVarchar t ->
Q.toPrepVal t
PGValText t ->
Q.toPrepVal t
PGValDate d ->
Q.toPrepVal d
PGValTimeStampTZ u ->
Q.toPrepVal u
PGValTimeTZ (ZonedTimeOfDay t z) ->
Q.toPrepValHelper PTI.timetz PE.timetz_int (t, z)
PGNull ty ->
(pgTypeOid ty, Nothing)
PGValJSON u ->
Q.toPrepVal u
PGValJSONB u ->
Q.toPrepVal u
PGValGeo o ->
Q.toPrepVal $ TL.toStrict $ AE.encodeToLazyText o
PGValUnknown t ->
textToPrepVal t
toPrepParam :: Int -> PGScalarType -> S.SQLExp
toPrepParam i ty = withGeoVal ty $ S.SEPrep i
textToPrepVal :: Text -> Q.PrepArg
textToPrepVal t =
(PTI.auto, Just (TE.encodeUtf8 t, PQ.Text))
toBinaryValue :: WithScalarType PGScalarValue -> Q.PrepArg
toBinaryValue = binEncoder . pstValue
parsePGValue' :: PGColType
-> Value
-> AT.Parser PGColValue
parsePGValue' ty Null =
return $ PGNull ty
parsePGValue' PGSmallInt val =
PGValSmallInt <$> parseJSON val
parsePGValue' PGInteger val =
PGValInteger <$> parseJSON val
parsePGValue' PGBigInt val =
PGValBigInt <$> parseJSON val
parsePGValue' PGSerial val =
PGValInteger <$> parseJSON val
parsePGValue' PGBigSerial val =
PGValBigInt <$> parseJSON val
parsePGValue' PGFloat val =
PGValFloat <$> parseJSON val
parsePGValue' PGDouble val =
PGValDouble <$> parseJSON val
parsePGValue' PGNumeric val =
PGValNumeric <$> parseJSON val
parsePGValue' PGBoolean val =
PGValBoolean <$> parseJSON val
parsePGValue' PGChar val =
PGValChar <$> parseJSON val
parsePGValue' PGVarchar val =
PGValVarchar <$> parseJSON val
parsePGValue' PGText val =
PGValText <$> parseJSON val
parsePGValue' PGDate val =
PGValDate <$> parseJSON val
parsePGValue' PGTimeStampTZ val =
PGValTimeStampTZ <$> parseJSON val
parsePGValue' PGTimeTZ val =
PGValTimeTZ <$> parseJSON val
parsePGValue' PGJSON val =
PGValJSON . Q.JSON <$> parseJSON val
parsePGValue' PGJSONB val =
PGValJSONB . Q.JSONB <$> parseJSON val
parsePGValue' PGGeometry val =
PGValGeo <$> parseJSON val
parsePGValue' PGGeography val =
PGValGeo <$> parseJSON val
parsePGValue' (PGUnknown _) (String t) =
return $ PGValUnknown t
parsePGValue' (PGUnknown tyName) _ =
fail $ "A string is expected for type : " ++ T.unpack tyName
parsePGValue :: PGColType -> Value -> AT.Parser PGColValue
parsePGValue pct val =
case val of
String t -> parsePGValue' pct val <|> return (PGValUnknown t)
_ -> parsePGValue' pct val
convToBin :: PGColType
-> Value
-> AT.Parser Q.PrepArg
convToBin ty val =
binEncoder <$> parsePGValue ty val
convToTxt :: PGColType
-> Value
-> AT.Parser S.SQLExp
convToTxt ty val =
toTxtValue ty <$> parsePGValue ty val
readEitherTxt :: (Read a) => T.Text -> Either String a
readEitherTxt = readEither . T.unpack
iresToEither :: IResult a -> Either String a
iresToEither (IError _ msg) = Left msg
iresToEither (ISuccess a) = return a
pgValFromJVal :: (FromJSON a) => Value -> Either String a
pgValFromJVal = iresToEither . ifromJSON
withGeoVal :: PGColType -> S.SQLExp -> S.SQLExp
withGeoVal ty v =
bool v applyGeomFromGeoJson isGeoTy
where
applyGeomFromGeoJson =
S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing
isGeoTy = case ty of
PGGeometry -> True
PGGeography -> True
_ -> False
toPrepParam :: Int -> PGColType -> S.SQLExp
toPrepParam i ty =
withGeoVal ty $ S.SEPrep i
toTxtValue :: PGColType -> PGColValue -> S.SQLExp
toTxtValue ty val =
S.withTyAnn ty txtVal
where
txtVal = withGeoVal ty $ txtEncoder val
pgColValueToInt :: PGColValue -> Maybe Int
pgColValueToInt (PGValInteger i) = Just $ fromIntegral i
pgColValueToInt (PGValSmallInt i) = Just $ fromIntegral i
pgColValueToInt (PGValBigInt i) = Just $ fromIntegral i
pgColValueToInt _ = Nothing
toTxtValue :: WithScalarType PGScalarValue -> S.SQLExp
toTxtValue (WithScalarType ty val) = S.withTyAnn ty . withGeoVal ty $ txtEncoder val

View File

@ -45,7 +45,7 @@ import qualified Hasura.Server.PGDump as PGD
import Hasura.EncJSON
import Hasura.Prelude hiding (get, put)
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DDL.Schema
import Hasura.RQL.Types
import Hasura.Server.Auth (AuthMode (..),
getUserInfo)

View File

@ -656,11 +656,11 @@ connInfoErrModifier :: String -> String
connInfoErrModifier s = "Fatal Error : " ++ s
mkConnInfo ::RawConnInfo -> Either String Q.ConnInfo
mkConnInfo (RawConnInfo mHost mPort mUser pass mURL mDB opts mRetries) =
mkConnInfo (RawConnInfo mHost mPort mUser password mURL mDB opts mRetries) =
case (mHost, mPort, mUser, mDB, mURL) of
(Just host, Just port, Just user, Just db, Nothing) ->
return $ Q.ConnInfo host port user pass db opts retries
return $ Q.ConnInfo host port user password db opts retries
(_, _, _, _, Just dbURL) -> maybe (throwError invalidUrlMsg)
withRetries $ parseDatabaseUrl dbURL opts

View File

@ -16,8 +16,7 @@ import Hasura.RQL.DDL.QueryCollection
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.Relationship.Rename
import Hasura.RQL.DDL.RemoteSchema
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Schema.Table
import Hasura.RQL.DDL.Schema
import Hasura.RQL.DML.Count
import Hasura.RQL.DML.Delete
import Hasura.RQL.DML.Insert
@ -33,6 +32,7 @@ data RQLQuery
= RQAddExistingTableOrView !TrackTable
| RQTrackTable !TrackTable
| RQUntrackTable !UntrackTable
| RQSetTableIsEnum !SetTableIsEnum
| RQTrackFunction !TrackFunction
| RQUntrackFunction !UnTrackFunction
@ -173,6 +173,7 @@ queryNeedsReload qi = case qi of
RQUntrackTable _ -> True
RQTrackFunction _ -> True
RQUntrackFunction _ -> True
RQSetTableIsEnum _ -> True
RQCreateObjectRelationship _ -> True
RQCreateArrayRelationship _ -> True
@ -242,6 +243,7 @@ runQueryM rq =
RQAddExistingTableOrView q -> runTrackTableQ q
RQTrackTable q -> runTrackTableQ q
RQUntrackTable q -> runUntrackTableQ q
RQSetTableIsEnum q -> runSetExistingTableIsEnumQ q
RQTrackFunction q -> runTrackFunc q
RQUntrackFunction q -> runUntrackFunc q

View File

@ -5,10 +5,10 @@ where
import Hasura.Prelude
import Hasura.Logging
import Hasura.RQL.DDL.Schema.Table (buildSCWithoutSetup)
import Hasura.RQL.DDL.Schema (buildSchemaCacheWithoutSetup)
import Hasura.RQL.Types
import Hasura.Server.App (SchemaCacheRef (..), withSCUpdate)
import Hasura.Server.Init (InstanceId (..))
import Hasura.Server.App (SchemaCacheRef (..), withSCUpdate)
import Hasura.Server.Init (InstanceId (..))
import Hasura.Server.Logging
import Hasura.Server.Query
@ -16,13 +16,13 @@ import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import qualified Control.Concurrent as C
import qualified Control.Concurrent.STM as STM
import qualified Data.Text as T
import qualified Data.Time as UTC
import qualified Database.PG.Query as PG
import qualified Database.PostgreSQL.LibPQ as PQ
import qualified Network.HTTP.Client as HTTP
import qualified Control.Concurrent as C
import qualified Control.Concurrent.STM as STM
import qualified Data.Text as T
import qualified Data.Time as UTC
import qualified Database.PG.Query as PG
import qualified Database.PostgreSQL.LibPQ as PQ
import qualified Network.HTTP.Client as HTTP
pgChannel :: PG.PGChannel
pgChannel = "hasura_schema_update"
@ -204,7 +204,7 @@ refreshSchemaCache sqlGenCtx pool logger httpManager cacheRef threadType msg = d
-- Reload schema cache from catalog
resE <- liftIO $ runExceptT $ withSCUpdate cacheRef logger $
peelRun emptySchemaCache adminUserInfo
httpManager sqlGenCtx (PGExecCtx pool PG.Serializable) buildSCWithoutSetup
httpManager sqlGenCtx (PGExecCtx pool PG.Serializable) buildSchemaCacheWithoutSetup
case resE of
Left e -> logError logger threadType $ TEQueryError e
Right _ ->

View File

@ -58,6 +58,7 @@ data Metrics
= Metrics
{ _mtTables :: !Int
, _mtViews :: !Int
, _mtEnumTables :: !Int
, _mtRelationships :: !RelationshipMetric
, _mtPermissions :: !PermissionMetric
, _mtEventTriggers :: !Int
@ -128,12 +129,13 @@ runTelemetry (Logger logger) manager cacheRef dbId instanceId = do
computeMetrics :: SchemaCache -> Metrics
computeMetrics sc =
let nTables = Map.size $ Map.filter (isNothing . tiViewInfo) usrTbls
nViews = Map.size $ Map.filter (isJust . tiViewInfo) usrTbls
allRels = join $ Map.elems $ Map.map relsOfTbl usrTbls
let nTables = countUserTables (isNothing . _tiViewInfo)
nViews = countUserTables (isJust . _tiViewInfo)
nEnumTables = countUserTables (isJust . _tiEnumValues)
allRels = join $ Map.elems $ Map.map relsOfTbl userTables
(manualRels, autoRels) = partition riIsManual allRels
relMetrics = RelationshipMetric (length manualRels) (length autoRels)
rolePerms = join $ Map.elems $ Map.map permsOfTbl usrTbls
rolePerms = join $ Map.elems $ Map.map permsOfTbl userTables
nRoles = length $ nub $ fst <$> rolePerms
allPerms = snd <$> rolePerms
insPerms = calcPerms _permIns allPerms
@ -143,23 +145,24 @@ computeMetrics sc =
permMetrics =
PermissionMetric selPerms insPerms updPerms delPerms nRoles
evtTriggers = Map.size $ Map.filter (not . Map.null)
$ Map.map tiEventTriggerInfoMap usrTbls
$ Map.map _tiEventTriggerInfoMap userTables
rmSchemas = Map.size $ scRemoteSchemas sc
funcs = Map.size $ Map.filter (not . fiSystemDefined) $ scFunctions sc
in Metrics nTables nViews relMetrics permMetrics evtTriggers rmSchemas funcs
in Metrics nTables nViews nEnumTables relMetrics permMetrics evtTriggers rmSchemas funcs
where
usrTbls = Map.filter (not . tiSystemDefined) $ scTables sc
userTables = Map.filter (not . _tiSystemDefined) $ scTables sc
countUserTables predicate = length . filter predicate $ Map.elems userTables
calcPerms :: (RolePermInfo -> Maybe a) -> [RolePermInfo] -> Int
calcPerms fn perms = length $ catMaybes $ map fn perms
relsOfTbl :: TableInfo -> [RelInfo]
relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . tiFieldInfoMap
relsOfTbl :: TableInfo PGColumnInfo -> [RelInfo]
relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . _tiFieldInfoMap
permsOfTbl :: TableInfo -> [(RoleName, RolePermInfo)]
permsOfTbl = Map.toList . tiRolePermInfoMap
permsOfTbl :: TableInfo PGColumnInfo -> [(RoleName, RolePermInfo)]
permsOfTbl = Map.toList . _tiRolePermInfoMap
getDbId :: Q.TxE QErr Text

View File

@ -14,38 +14,28 @@ from
select
coalesce(json_agg(
json_build_object(
'table',
json_build_object(
'name', json_build_object(
'name', ht.table_name,
'schema', ht.table_schema
),
'system_defined', ht.is_system_defined,
'info', tables.info
'is_enum', ht.is_enum,
'is_system_defined', ht.is_system_defined,
'info', t.info
)
), '[]') as items
from
hdb_catalog.hdb_table as ht
left outer join (
select
table_schema,
table_name,
json_build_object(
'name',
json_build_object(
'schema', table_schema,
'name', table_name
),
'columns', columns,
'primary_key_columns', primary_key_columns,
'constraints', constraints,
'view_info', view_info
) as info
from
hdb_catalog.hdb_table_info_agg
) as tables on (
tables.table_schema = ht.table_schema
and tables.table_name = ht.table_name
)
from hdb_catalog.hdb_table as ht
left outer join (
select
table_schema,
table_name,
jsonb_build_object(
'columns', columns,
'primary_key_columns', primary_key_columns,
'constraints', constraints,
'view_info', view_info
) as info
from hdb_catalog.hdb_table_info_agg
) as t using (table_schema, table_name)
) as tables,
(
select

View File

@ -14,26 +14,11 @@ CREATE TABLE hdb_catalog.hdb_table
table_schema TEXT,
table_name TEXT,
is_system_defined boolean default false,
is_enum boolean NOT NULL DEFAULT false,
PRIMARY KEY (table_schema, table_name)
);
CREATE FUNCTION hdb_catalog.hdb_table_oid_check() RETURNS trigger AS
$function$
BEGIN
IF (EXISTS (SELECT 1 FROM information_schema.tables st WHERE st.table_schema = NEW.table_schema AND st.table_name = NEW.table_name)) THEN
return NEW;
ELSE
RAISE foreign_key_violation using message = 'table_schema, table_name not in information_schema.tables';
return NULL;
END IF;
END;
$function$
LANGUAGE plpgsql;
CREATE TRIGGER hdb_table_oid_check BEFORE INSERT OR UPDATE ON hdb_catalog.hdb_table
FOR EACH ROW EXECUTE PROCEDURE hdb_catalog.hdb_table_oid_check();
CREATE TABLE hdb_catalog.hdb_relationship
(
table_schema TEXT,
@ -83,7 +68,9 @@ SELECT
min(q.ref_table) :: text as ref_table,
json_object_agg(ac.attname, afc.attname) as column_mapping,
min(q.confupdtype) :: text as on_update,
min(q.confdeltype) :: text as on_delete
min(q.confdeltype) :: text as on_delete,
json_agg(ac.attname) as columns,
json_agg(afc.attname) as ref_columns
FROM
(SELECT
ctn.nspname AS table_schema,
@ -431,6 +418,37 @@ CREATE TRIGGER hdb_schema_update_event_notifier AFTER INSERT OR UPDATE ON
hdb_catalog.hdb_schema_update_event FOR EACH ROW EXECUTE PROCEDURE
hdb_catalog.hdb_schema_update_event_notifier();
CREATE VIEW hdb_catalog.hdb_column AS
WITH primary_key_references AS (
SELECT fkey.table_schema AS src_table_schema
, fkey.table_name AS src_table_name
, fkey.columns->>0 AS src_column_name
, json_agg(json_build_object(
'schema', fkey.ref_table_table_schema,
'name', fkey.ref_table
)) AS ref_tables
FROM hdb_catalog.hdb_foreign_key_constraint AS fkey
JOIN hdb_catalog.hdb_primary_key AS pkey
ON pkey.table_schema = fkey.ref_table_table_schema
AND pkey.table_name = fkey.ref_table
AND pkey.columns::jsonb = fkey.ref_columns::jsonb
WHERE json_array_length(fkey.columns) = 1
GROUP BY fkey.table_schema
, fkey.table_name
, fkey.columns->>0)
SELECT columns.table_schema
, columns.table_name
, columns.column_name AS name
, columns.udt_name AS type
, columns.is_nullable
, columns.ordinal_position
, coalesce(pkey_refs.ref_tables, '[]') AS primary_key_references
FROM information_schema.columns
LEFT JOIN primary_key_references AS pkey_refs
ON columns.table_schema = pkey_refs.src_table_schema
AND columns.table_name = pkey_refs.src_table_name
AND columns.column_name = pkey_refs.src_column_name;
CREATE VIEW hdb_catalog.hdb_table_info_agg AS (
select
tables.table_name as table_name,
@ -447,16 +465,14 @@ from
c.table_schema,
json_agg(
json_build_object(
'name',
column_name,
'type',
udt_name,
'is_nullable',
is_nullable :: boolean
'name', name,
'type', type,
'is_nullable', is_nullable :: boolean,
'references', primary_key_references
)
) as columns
from
information_schema.columns c
hdb_catalog.hdb_column c
group by
c.table_schema,
c.table_name

View File

@ -0,0 +1,158 @@
ALTER TABLE hdb_catalog.hdb_table
ADD COLUMN is_enum boolean NOT NULL DEFAULT false;
DROP TRIGGER hdb_table_oid_check ON hdb_catalog.hdb_table;
DROP FUNCTION hdb_catalog.hdb_table_oid_check();
CREATE OR REPLACE VIEW hdb_catalog.hdb_foreign_key_constraint AS
SELECT
q.table_schema :: text,
q.table_name :: text,
q.constraint_name :: text,
min(q.constraint_oid) :: integer as constraint_oid,
min(q.ref_table_table_schema) :: text as ref_table_table_schema,
min(q.ref_table) :: text as ref_table,
json_object_agg(ac.attname, afc.attname) as column_mapping,
min(q.confupdtype) :: text as on_update,
min(q.confdeltype) :: text as on_delete,
json_agg(ac.attname) as columns,
json_agg(afc.attname) as ref_columns
FROM
(SELECT
ctn.nspname AS table_schema,
ct.relname AS table_name,
r.conrelid AS table_id,
r.conname as constraint_name,
r.oid as constraint_oid,
cftn.nspname AS ref_table_table_schema,
cft.relname as ref_table,
r.confrelid as ref_table_id,
r.confupdtype,
r.confdeltype,
UNNEST (r.conkey) AS column_id,
UNNEST (r.confkey) AS ref_column_id
FROM
pg_catalog.pg_constraint r
JOIN pg_catalog.pg_class ct
ON r.conrelid = ct.oid
JOIN pg_catalog.pg_namespace ctn
ON ct.relnamespace = ctn.oid
JOIN pg_catalog.pg_class cft
ON r.confrelid = cft.oid
JOIN pg_catalog.pg_namespace cftn
ON cft.relnamespace = cftn.oid
WHERE
r.contype = 'f'
) q
JOIN pg_catalog.pg_attribute ac
ON q.column_id = ac.attnum
AND q.table_id = ac.attrelid
JOIN pg_catalog.pg_attribute afc
ON q.ref_column_id = afc.attnum
AND q.ref_table_id = afc.attrelid
GROUP BY q.table_schema, q.table_name, q.constraint_name;
CREATE VIEW hdb_catalog.hdb_column AS
WITH primary_key_references AS (
SELECT fkey.table_schema AS src_table_schema
, fkey.table_name AS src_table_name
, fkey.columns->>0 AS src_column_name
, json_agg(json_build_object(
'schema', fkey.ref_table_table_schema,
'name', fkey.ref_table
)) AS ref_tables
FROM hdb_catalog.hdb_foreign_key_constraint AS fkey
JOIN hdb_catalog.hdb_primary_key AS pkey
ON pkey.table_schema = fkey.ref_table_table_schema
AND pkey.table_name = fkey.ref_table
AND pkey.columns::jsonb = fkey.ref_columns::jsonb
WHERE json_array_length(fkey.columns) = 1
GROUP BY fkey.table_schema
, fkey.table_name
, fkey.columns->>0)
SELECT columns.table_schema
, columns.table_name
, columns.column_name AS name
, columns.udt_name AS type
, columns.is_nullable
, columns.ordinal_position
, coalesce(pkey_refs.ref_tables, '[]') AS primary_key_references
FROM information_schema.columns
LEFT JOIN primary_key_references AS pkey_refs
ON columns.table_schema = pkey_refs.src_table_schema
AND columns.table_name = pkey_refs.src_table_name
AND columns.column_name = pkey_refs.src_column_name;
CREATE OR REPLACE VIEW hdb_catalog.hdb_table_info_agg AS (
select
tables.table_name as table_name,
tables.table_schema as table_schema,
coalesce(columns.columns, '[]') as columns,
coalesce(pk.columns, '[]') as primary_key_columns,
coalesce(constraints.constraints, '[]') as constraints,
coalesce(views.view_info, 'null') as view_info
from
information_schema.tables as tables
left outer join (
select
c.table_name,
c.table_schema,
json_agg(
json_build_object(
'name', name,
'type', type,
'is_nullable', is_nullable :: boolean,
'references', primary_key_references
)
) as columns
from
hdb_catalog.hdb_column c
group by
c.table_schema,
c.table_name
) columns on (
tables.table_schema = columns.table_schema
AND tables.table_name = columns.table_name
)
left outer join (
select * from hdb_catalog.hdb_primary_key
) pk on (
tables.table_schema = pk.table_schema
AND tables.table_name = pk.table_name
)
left outer join (
select
c.table_schema,
c.table_name,
json_agg(constraint_name) as constraints
from
information_schema.table_constraints c
where
c.constraint_type = 'UNIQUE'
or c.constraint_type = 'PRIMARY KEY'
group by
c.table_schema,
c.table_name
) constraints on (
tables.table_schema = constraints.table_schema
AND tables.table_name = constraints.table_name
)
left outer join (
select
table_schema,
table_name,
json_build_object(
'is_updatable',
(is_updatable::boolean OR is_trigger_updatable::boolean),
'is_deletable',
(is_updatable::boolean OR is_trigger_deletable::boolean),
'is_insertable',
(is_insertable_into::boolean OR is_trigger_insertable_into::boolean)
) as view_info
from
information_schema.views v
) views on (
tables.table_schema = views.table_schema
AND tables.table_name = views.table_name
)
);

View File

@ -20,21 +20,16 @@ FROM
table_schema,
table_name,
json_agg(
(
SELECT
r
FROM
(
SELECT
column_name,
udt_name AS data_type,
ordinal_position,
is_nullable :: boolean
) r
json_build_object(
'column_name', name,
'data_type', type,
'is_nullable', is_nullable :: boolean,
'ordinal_position', ordinal_position,
'references', primary_key_references
)
) as columns
FROM
information_schema.columns
hdb_catalog.hdb_column
GROUP BY
table_schema,
table_name

View File

@ -32,6 +32,7 @@ extra-deps:
- reroute-0.5.0.0
- Spock-core-0.13.0.0
- monad-validate-1.2.0.0
# Override default flag values for local packages and extra-deps
flags: {}

View File

@ -95,6 +95,13 @@ packages:
sha256: 86140298020f68bb09d07b26a6a6f1666fc3a02715d7986b09150727247a1a84
original:
hackage: Spock-core-0.13.0.0
- completed:
hackage: monad-validate-1.2.0.0@sha256:9850f408431098b28806dd464b6825a88a0b56c84f380d7fe0454c1df9d6f881,3505
pantry-tree:
size: 713
sha256: 8e049bd12ce2bd470909578f2ee8eb80b89d5ff88860afa30e29dd4eafecfa3e
original:
hackage: monad-validate-1.2.0.0
snapshots:
- completed:
size: 498167

View File

@ -0,0 +1,21 @@
description: Test deleting records filtered by an enum reference
url: /v1/graphql
status: 200
response:
data:
delete_users:
affected_rows: 1
returning:
- name: Alyssa
favorite_color: red
query:
query: |
mutation {
delete_users(where: {favorite_color: {_eq: red}}) {
affected_rows
returning {
name
favorite_color
}
}
}

View File

@ -0,0 +1,24 @@
description: Test inserting a record that references an enum table
url: /v1/graphql
status: 200
response:
data:
insert_users:
returning:
- name: Matthew
favorite_color: yellow
- name: Robby
favorite_color: purple
query:
query: |
mutation {
insert_users(objects: [
{ name: "Matthew", favorite_color: yellow },
{ name: "Robby", favorite_color: purple }
]) {
returning {
name
favorite_color
}
}
}

View File

@ -0,0 +1,19 @@
description: Test inserting a record with an invalid enum value
url: /v1/graphql
status: 200
response:
errors:
- message: 'unexpected value "not_a_real_color" for enum: ''colors_enum'''
extensions:
code: validation-failed
path: $.selectionSet.insert_users.args.objects[0].favorite_color
query:
query: |
mutation {
insert_users(objects: [{ name: "Matthew", favorite_color: not_a_real_color }]) {
returning {
name
favorite_color
}
}
}

View File

@ -0,0 +1,28 @@
type: bulk
args:
- type: run_sql
args:
sql: |
CREATE TABLE colors
( value text PRIMARY KEY
, comment text );
INSERT INTO colors (value, comment) VALUES
('red', '#FF0000'),
('green', '#00FF00'),
('blue', '#0000FF'),
        ('orange', '#FFA500'),
        ('yellow', '#FFFF00'),
        ('purple', '#800080');
CREATE TABLE users
( id serial PRIMARY KEY
, name text NOT NULL
, favorite_color text NOT NULL REFERENCES colors );
- type: track_table
args:
table: colors
is_enum: true
- type: track_table
args: users

View File

@ -0,0 +1,8 @@
type: bulk
args:
- type: run_sql
args:
sql: |
DROP TABLE users;
DROP TABLE colors;
cascade: true

View File

@ -0,0 +1,21 @@
description: Test updating a record that references an enum table
url: /v1/graphql
status: 200
response:
data:
update_users:
affected_rows: 1
returning:
- name: Alyssa
favorite_color: blue
query:
query: |
mutation {
update_users(where: {id: {_eq: 1}}, _set: {favorite_color: blue}) {
affected_rows
returning {
name
favorite_color
}
}
}

View File

@ -0,0 +1,21 @@
description: Test updating records filtered by an enum reference
url: /v1/graphql
status: 200
response:
data:
update_users:
affected_rows: 1
returning:
- name: Alyssa
favorite_color: blue
query:
query: |
mutation {
update_users(where: {favorite_color: {_eq: red}}, _set: {favorite_color: blue}) {
affected_rows
returning {
name
favorite_color
}
}
}

View File

@ -0,0 +1,11 @@
type: bulk
args:
- type: insert
args:
table: users
objects:
- name: Alyssa
favorite_color: red
- name: Ben
favorite_color: blue

View File

@ -0,0 +1,8 @@
type: bulk
args:
- type: run_sql
args:
sql: |
DELETE FROM users;
SELECT setval('users_id_seq', 1, FALSE);

View File

@ -7,7 +7,7 @@ response:
code: validation-failed
path: $.selectionSet.author.args.where.name._ne
message: |-
field "_ne" not found in type: 'text_comparison_exp'
field "_ne" not found in type: 'String_comparison_exp'
query:
query: |
query {

View File

@ -7,7 +7,7 @@ response:
code: validation-failed
path: $.selectionSet.author.args.where.id._unexpected
message: |-
field "_unexpected" not found in type: 'integer_comparison_exp'
field "_unexpected" not found in type: 'Int_comparison_exp'
query:
query: |
query {

Some files were not shown because too many files have changed in this diff.