Mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-14 08:02:15 +03:00)
Add support for GraphQL enum types via enum table references
These changes also add a new type, PGColumnType, between PGColInfo and PGScalarType, and they process PGRawColumnType values into PGColumnType values during schema cache generation.
commit ed26da59a6 (parent 86663f9af7)
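For orientation while reading the diff, here is a minimal sketch of the column-type layer this commit introduces, inferred from the usages visible in the hunks below. The real definitions live in the new Hasura.RQL.Types.Column module; the stand-in types and the exact constructor and field shapes here are assumptions, not copied source.

{-# LANGUAGE LambdaCase #-}

-- Stand-ins for types defined elsewhere in the codebase (assumed shapes):
data PGScalarType  = PGText | PGInteger | PGGeometry deriving (Show, Eq)
data EnumReference = EnumReference deriving (Show, Eq) -- enum table plus its values

-- The new intermediate type: a column is either an ordinary Postgres scalar
-- or a reference to an enum table whose rows define a GraphQL enum type.
data PGColumnType
  = PGColumnScalar !PGScalarType
  | PGColumnEnumReference !EnumReference
  deriving (Show, Eq)

-- A value tagged with the scalar type Postgres should see; values of
-- enum-table columns travel as text.
data PGScalarTyped a = PGScalarTyped
  { pstType  :: !PGScalarType
  , pstValue :: !a
  } deriving (Show, Eq)

-- Collapse a column type to its scalar representation ("unsafe" because an
-- enum reference forgets that only particular values are legal).
unsafePGColumnToRepresentation :: PGColumnType -> PGScalarType
unsafePGColumnToRepresentation = \case
  PGColumnScalar ty       -> ty
  PGColumnEnumReference _ -> PGText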
@@ -159,7 +159,7 @@ jobs:
   # build the server binary, and package into docker image
   build_server:
     docker:
-      - image: hasura/graphql-engine-server-builder:20190507-1
+      - image: hasura/graphql-engine-server-builder:20190811
     working_directory: ~/graphql-engine
     steps:
       - attach_workspace:
@@ -235,7 +235,7 @@ jobs:
     environment:
       PG_VERSION: "11_1"
     docker:
-      - image: hasura/graphql-engine-server-builder:20190507-1
+      - image: hasura/graphql-engine-server-builder:20190811
       # TODO: change this to circleci postgis when they have one for pg 11
       - image: mdillon/postgis:11-alpine
         <<: *test_pg_env
@@ -245,7 +245,7 @@ jobs:
     environment:
       PG_VERSION: "10_6"
     docker:
-      - image: hasura/graphql-engine-server-builder:20190507-1
+      - image: hasura/graphql-engine-server-builder:20190811
       - image: circleci/postgres:10.6-alpine-postgis
         <<: *test_pg_env

@@ -254,7 +254,7 @@ jobs:
     environment:
       PG_VERSION: "9_6"
     docker:
-      - image: hasura/graphql-engine-server-builder:20190507-1
+      - image: hasura/graphql-engine-server-builder:20190811
       - image: circleci/postgres:9.6-alpine-postgis
         <<: *test_pg_env

@@ -263,7 +263,7 @@ jobs:
     environment:
       PG_VERSION: "9_5"
    docker:
-      - image: hasura/graphql-engine-server-builder:20190507-1
+      - image: hasura/graphql-engine-server-builder:20190811
       - image: circleci/postgres:9.5-alpine-postgis
         <<: *test_pg_env
@@ -28,3 +28,5 @@ RUN apt-get -y update \
     && rm -rf /usr/share/doc/ \
     && rm -rf /usr/share/man/ \
     && rm -rf /usr/share/locale/
+
+ENV LANG=C.UTF-8 LC_ALL=C.UTF-8
@@ -34,7 +34,6 @@ var testMetadataPrev = map[string][]byte{
 	"metadata": []byte(`allowlist: []
 functions: []
 query_collections: []
-query_templates: []
 remote_schemas: []
 tables:
 - array_relationships: []
@@ -49,7 +48,6 @@ tables:
 	"empty-metadata": []byte(`allowlist: []
 functions: []
 query_collections: []
-query_templates: []
 remote_schemas: []
 tables: []
 `),
@@ -65,6 +63,7 @@ tables:
   delete_permissions: []
   event_triggers: []
   insert_permissions: []
+  is_enum: false
   object_relationships: []
   select_permissions: []
   table: test
@@ -264,7 +263,7 @@ func mustWriteFile(t testing.TB, dir, file string, body string) {

 func compareMetadata(t testing.TB, metadataFile string, actualType string, serverVersion *semver.Version) {
 	var actualData []byte
-	c, err := semver.NewConstraint("<= v1.0.0-beta.3")
+	c, err := semver.NewConstraint("<= v1.0.0-beta.5")
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -56,6 +56,7 @@ library
                      , containers
                      , monad-control
                      , monad-time
+                     , monad-validate
                      , fast-logger
                      , wai
                      , postgresql-binary
@@ -171,16 +172,17 @@ library
                      , Hasura.RQL.Instances
                      , Hasura.RQL.Types.SchemaCache
                      , Hasura.RQL.Types.SchemaCacheTypes
-                     , Hasura.RQL.Types.Common
-                     , Hasura.RQL.Types.Catalog
                      , Hasura.RQL.Types.BoolExp
-                     , Hasura.RQL.Types.Permission
-                     , Hasura.RQL.Types.Error
+                     , Hasura.RQL.Types.Catalog
+                     , Hasura.RQL.Types.Column
+                     , Hasura.RQL.Types.Common
                      , Hasura.RQL.Types.DML
+                     , Hasura.RQL.Types.Error
                      , Hasura.RQL.Types.EventTrigger
-                     , Hasura.RQL.Types.RemoteSchema
                      , Hasura.RQL.Types.Metadata
+                     , Hasura.RQL.Types.Permission
                      , Hasura.RQL.Types.QueryCollection
+                     , Hasura.RQL.Types.RemoteSchema
                      , Hasura.RQL.DDL.Deps
                      , Hasura.RQL.DDL.Permission.Internal
                      , Hasura.RQL.DDL.Permission.Triggers
@@ -188,10 +190,11 @@ library
                      , Hasura.RQL.DDL.Relationship
                      , Hasura.RQL.DDL.Relationship.Rename
                      , Hasura.RQL.DDL.Relationship.Types
-                     , Hasura.RQL.DDL.Schema.Table
-                     , Hasura.RQL.DDL.Schema.Rename
-                     , Hasura.RQL.DDL.Schema.Function
+                     , Hasura.RQL.DDL.Schema.Enum
                      , Hasura.RQL.DDL.Schema.Diff
+                     , Hasura.RQL.DDL.Schema.Function
+                     , Hasura.RQL.DDL.Schema.Rename
+                     , Hasura.RQL.DDL.Schema.Table
                      , Hasura.RQL.DDL.Metadata
                      , Hasura.RQL.DDL.Utils
                      , Hasura.RQL.DDL.EventTrigger
@@ -258,6 +261,7 @@ library

                      , Hasura.HTTP

+                     , Control.Lens.Extended
                      , Data.Text.Extended
                      , Data.Aeson.Extended
                      , Data.Sequence.NonEmpty
@@ -266,11 +270,12 @@ library
                      , Data.Parser.JSONPath

                      , Hasura.SQL.DML
+                     , Hasura.SQL.Error
+                     , Hasura.SQL.GeoJSON
+                     , Hasura.SQL.Rewrite
+                     , Hasura.SQL.Time
                      , Hasura.SQL.Types
                      , Hasura.SQL.Value
-                     , Hasura.SQL.GeoJSON
-                     , Hasura.SQL.Time
-                     , Hasura.SQL.Rewrite
                      , Network.URI.Extended
                      , Ops
                      , Migrate
@@ -278,28 +283,29 @@ library
   other-modules:       Hasura.Server.Auth.JWT.Internal
                      , Hasura.Server.Auth.JWT.Logging

-  default-extensions:  EmptyCase
-                       FlexibleContexts
-                       FlexibleInstances
-                       InstanceSigs
-                       MultiParamTypeClasses
-                       LambdaCase
-                       MultiWayIf
-                       TupleSections
+  default-extensions:  ApplicativeDo
+                       BangPatterns
+                       DeriveDataTypeable
+                       DeriveFoldable
+                       DeriveFunctor
+                       DeriveGeneric
+                       DeriveLift
+                       DeriveTraversable
+                       EmptyCase
+                       FlexibleContexts
+                       FlexibleInstances
                        GeneralizedNewtypeDeriving
-                       BangPatterns
+                       InstanceSigs
+                       LambdaCase
+                       MultiParamTypeClasses
+                       MultiWayIf
+                       NoImplicitPrelude
                        OverloadedStrings
+                       QuasiQuotes
                        ScopedTypeVariables
                        TemplateHaskell
-                       QuasiQuotes
                        TupleSections
                        TypeFamilies
-                       NoImplicitPrelude
-                       DeriveDataTypeable


   if flag(profile)
@@ -308,6 +314,8 @@ library
     cpp-options: -DDeveloperAPIs

   ghc-options: -O2
+               -foptimal-applicative-do
+               -fdefer-typed-holes
                -Wall
                -Wcompat
                -Wincomplete-record-updates
@@ -315,27 +323,29 @@ library
                -Wredundant-constraints

 executable graphql-engine
-  default-extensions:  EmptyCase
-                       FlexibleContexts
-                       FlexibleInstances
-                       InstanceSigs
-                       MultiParamTypeClasses
-                       LambdaCase
-                       MultiWayIf
-                       TupleSections
+  default-extensions:  ApplicativeDo
+                       BangPatterns
+                       DeriveDataTypeable
+                       DeriveFoldable
+                       DeriveFunctor
+                       DeriveGeneric
+                       DeriveLift
+                       DeriveTraversable
+                       EmptyCase
+                       FlexibleContexts
+                       FlexibleInstances
                        GeneralizedNewtypeDeriving
-                       BangPatterns
+                       InstanceSigs
+                       LambdaCase
+                       MultiParamTypeClasses
+                       MultiWayIf
+                       NoImplicitPrelude
                        OverloadedStrings
+                       QuasiQuotes
                        ScopedTypeVariables
                        TemplateHaskell
-                       QuasiQuotes
                        TupleSections
                        TypeFamilies
-                       NoImplicitPrelude

   main-is:             Main.hs
   default-language:    Haskell2010
@@ -368,6 +378,8 @@ executable graphql-engine
   ghc-prof-options: -rtsopts -fprof-auto -fno-prof-count-entries

   ghc-options: -O2
+               -foptimal-applicative-do
+               -fdefer-typed-holes
                -Wall
                -Wcompat
                -Wincomplete-record-updates
@@ -19,7 +19,7 @@ import qualified Data.Yaml.TH as Y
 import qualified Database.PG.Query as Q

 curCatalogVer :: T.Text
-curCatalogVer = "19"
+curCatalogVer = "20"

 migrateMetadata
   :: ( MonadTx m
@@ -344,6 +344,12 @@ from18To19 = do
     $(Q.sqlFromFile "src-rsr/migrate_from_18_to_19.sql")
   return ()

+from19To20 :: (MonadTx m) => m ()
+from19To20 = do
+  Q.Discard () <- liftTx $ Q.multiQE defaultTxErrorHandler
+    $(Q.sqlFromFile "src-rsr/migrate_from_19_to_20.sql")
+  pure ()
+
 migrateCatalog
   :: ( MonadTx m
      , CacheRWM m
@@ -353,70 +359,39 @@ migrateCatalog
      , HasSQLGenCtx m
      )
   => UTCTime -> m String
-migrateCatalog migrationTime = do
-  preVer <- getCatalogVersion
-  if | preVer == curCatalogVer ->
-         return $ "already at the latest version. current version: "
-                  <> show curCatalogVer
-     | preVer == "0.8" -> from08ToCurrent
-     | preVer == "1"   -> from1ToCurrent
-     | preVer == "2"   -> from2ToCurrent
-     | preVer == "3"   -> from3ToCurrent
-     | preVer == "4"   -> from4ToCurrent
-     | preVer == "5"   -> from5ToCurrent
-     | preVer == "6"   -> from6ToCurrent
-     | preVer == "7"   -> from7ToCurrent
-     | preVer == "8"   -> from8ToCurrent
-     | preVer == "9"   -> from9ToCurrent
-     | preVer == "10"  -> from10ToCurrent
-     | preVer == "11"  -> from11ToCurrent
-     | preVer == "12"  -> from12ToCurrent
-     | preVer == "13"  -> from13ToCurrent
-     | preVer == "14"  -> from14ToCurrent
-     | preVer == "15"  -> from15ToCurrent
-     | preVer == "16"  -> from16ToCurrent
-     | preVer == "17"  -> from17ToCurrent
-     | preVer == "18"  -> from18ToCurrent
-     | otherwise -> throw400 NotSupported $
-                    "unsupported version : " <> preVer
+migrateCatalog migrationTime = migrateFrom =<< getCatalogVersion
   where
-    from18ToCurrent = from18To19 >> postMigrate
-
-    from17ToCurrent = from17To18 >> from18ToCurrent
-
-    from16ToCurrent = from16To17 >> from17ToCurrent
-
-    from15ToCurrent = from15To16 >> from16ToCurrent
-
-    from14ToCurrent = from14To15 >> from15ToCurrent
-
-    from13ToCurrent = from13To14 >> from14ToCurrent
-
-    from12ToCurrent = from12To13 >> from13ToCurrent
-
-    from11ToCurrent = from11To12 >> from12ToCurrent
-
-    from10ToCurrent = from10To11 >> from11ToCurrent
-
-    from9ToCurrent = from9To10 >> from10ToCurrent
-
-    from8ToCurrent = from8To9 >> from9ToCurrent
-
-    from7ToCurrent = from7To8 >> from8ToCurrent
-
-    from6ToCurrent = from6To7 >> from7ToCurrent
-
-    from5ToCurrent = from5To6 >> from6ToCurrent
-
-    from4ToCurrent = from4To5 >> from5ToCurrent
-
-    from3ToCurrent = from3To4 >> from4ToCurrent
-
-    from2ToCurrent = from2To3 >> from3ToCurrent
-
-    from1ToCurrent = from1To2 >> from2ToCurrent
-
-    from08ToCurrent = from08To1 >> from1ToCurrent
+    migrateFrom previousVersion
+      | previousVersion == curCatalogVer =
+          return $ "already at the latest version. current version: " <> show curCatalogVer
+      | [] <- neededMigrations =
+          throw400 NotSupported $ "unsupported version : " <> previousVersion
+      | otherwise =
+          traverse_ snd neededMigrations >> postMigrate
+      where
+        neededMigrations = dropWhile ((/= previousVersion) . fst) migrations
+        migrations =
+          [ ("0.8", from08To1)
+          , ("1", from1To2)
+          , ("2", from2To3)
+          , ("3", from3To4)
+          , ("4", from4To5)
+          , ("5", from5To6)
+          , ("6", from6To7)
+          , ("7", from7To8)
+          , ("8", from8To9)
+          , ("9", from9To10)
+          , ("10", from10To11)
+          , ("11", from11To12)
+          , ("12", from12To13)
+          , ("13", from13To14)
+          , ("14", from14To15)
+          , ("15", from15To16)
+          , ("16", from16To17)
+          , ("17", from17To18)
+          , ("18", from18To19)
+          , ("19", from19To20)
+          ]

     postMigrate = do
       -- update the catalog version
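The refactor above replaces the hand-chained fromXToCurrent helpers with a single table of (version, migration) pairs: dropWhile skips every entry before the starting version and the remainder runs in order. A standalone sketch of the dispatch logic, with IO standing in for the real monad and putStrLn for the real migrations:

import Data.Foldable (traverse_)

-- Hypothetical stand-ins for the real migration actions.
migrations :: [(String, IO ())]
migrations =
  [ ("17", putStrLn "migrating 17 to 18")
  , ("18", putStrLn "migrating 18 to 19")
  , ("19", putStrLn "migrating 19 to 20")
  ]

main :: IO ()
main = do
  let previousVersion  = "18"
      neededMigrations = dropWhile ((/= previousVersion) . fst) migrations
  -- Runs "18 to 19" and then "19 to 20". For an unknown version, dropWhile
  -- consumes the whole list, which the real code reports as unsupported.
  traverse_ snd neededMigrations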
server/src-lib/Control/Lens/Extended.hs (new file, 19 lines)
@@ -0,0 +1,19 @@
+module Control.Lens.Extended
+  ( module Control.Lens
+  , (^..)
+  , (^@..)
+  ) where
+
+import Control.Lens hiding ((^..), (^@..))
+import Data.Monoid (Endo)
+import GHC.Exts (IsList, Item, fromList)
+
+infixl 8 ^..
+(^..) :: (IsList l, Item l ~ a) => s -> Getting (Endo [a]) s a -> l
+v ^.. l = fromList (toListOf l v)
+{-# INLINE (^..) #-}
+
+infixl 8 ^@..
+(^@..) :: (IsList l, Item l ~ (i, a)) => s -> IndexedGetting i (Endo [(i, a)]) s a -> l
+v ^@.. l = fromList (itoListOf l v)
+{-# INLINE (^@..) #-}
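The (^..) exported above generalizes the stock lens operator of the same name: it folds into any IsList container rather than only a list, which is what later lets the schema code collect scalar types straight into a set. A usage sketch, assuming the IsList instance for Data.Set from containers:

import Control.Lens.Extended
import qualified Data.Set as Set

-- Collect the even elements directly into a Set; duplicates disappear
-- through Set's fromList.
evens :: Set.Set Int
evens = [1, 2, 2, 4, 5] ^.. folded . filtered even  -- fromList [2,4]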
@@ -14,6 +14,8 @@ module Hasura.Db
   , defaultTxErrorHandler
   ) where

+import Control.Monad.Validate
+
 import qualified Data.Aeson.Extended as J
 import qualified Database.PG.Query   as Q

@@ -34,9 +36,10 @@ class (MonadError QErr m) => MonadTx m where

 instance (MonadTx m) => MonadTx (StateT s m) where
   liftTx = lift . liftTx

 instance (MonadTx m) => MonadTx (ReaderT s m) where
   liftTx = lift . liftTx
+instance (MonadTx m) => MonadTx (ValidateT e m) where
+  liftTx = lift . liftTx

 data LazyTx e a
   = LTErr !e
@@ -432,7 +432,7 @@ tryWebhook headers responseTimeout ep webhook = do
 getEventTriggerInfoFromEvent :: SchemaCache -> Event -> Maybe EventTriggerInfo
 getEventTriggerInfoFromEvent sc e = let table = eTable e
                                         tableInfo = M.lookup table $ scTables sc
-                                    in M.lookup ( tmName $ eTrigger e) =<< (tiEventTriggerInfoMap <$> tableInfo)
+                                    in M.lookup ( tmName $ eTrigger e) =<< (_tiEventTriggerInfoMap <$> tableInfo)

 fetchEvents :: Q.TxE QErr [Event]
 fetchEvents =
@@ -166,18 +166,18 @@ toMultiplexedQueryVar
   => GR.UnresolvedVal -> m S.SQLExp
 toMultiplexedQueryVar = \case
   GR.UVPG annPGVal ->
-    let GR.AnnPGVal varM isNullable colTy colVal = annPGVal
+    let GR.AnnPGVal varM isNullable _ colVal = annPGVal
     in case (varM, isNullable) of
       -- we don't check for nullability as
       -- this is only used for reusable plans
       -- the check has to be made before this
       (Just var, _) -> do
-        modify $ Map.insert var (colTy, colVal)
-        return $ fromResVars (PgTypeSimple colTy)
+        modify $ Map.insert var colVal
+        return $ fromResVars (PGTypeSimple $ pstType colVal)
           [ "variables"
           , G.unName $ G.unVariable var
           ]
-      _ -> return $ toTxtValue colTy colVal
+      _ -> return $ toTxtValue colVal
   GR.UVSessVar ty sessVar ->
     return $ fromResVars ty [ "user", T.toLower sessVar]
   GR.UVSQL sqlExp -> return sqlExp
@@ -198,19 +198,19 @@ subsOpFromPGAST
      , MonadIO m
      )
-  -- | to validate arguments
   => PGExecCtx
+  -- ^ to validate arguments
-  -- | used as part of an identifier in the underlying live query systems
-  -- to avoid unnecessary load on Postgres where possible
   -> GH.GQLReqUnparsed
+  -- ^ used as part of an identifier in the underlying live query systems
+  -- to avoid unnecessary load on Postgres where possible
-  -- | variable definitions as seen in the subscription, needed in
-  -- checking whether the subscription can be multiplexed or not
   -> [G.VariableDefinition]
+  -- ^ variable definitions as seen in the subscription, needed in
+  -- checking whether the subscription can be multiplexed or not
-  -- | The alias and the partially processed live query field
   -> (G.Alias, GR.QueryRootFldUnresolved)
+  -- ^ The alias and the partially processed live query field
   -> m (LiveQueryOp, Maybe SubsPlan)
 subsOpFromPGAST pgExecCtx reqUnparsed varDefs (fldAls, astUnresolved) = do
@@ -273,10 +273,10 @@ validateAnnVarValsOnPg pgExecCtx annVarVals = do

   Q.Discard _ <- runTx' $ liftTx $
     Q.rawQE valPgErrHandler (Q.fromBuilder $ toSQL valSel) [] False
-  return $ fmap (txtEncodedPGVal . snd) annVarVals
+  return $ fmap (txtEncodedPGVal . pstValue) annVarVals

   where
-    mkExtrs = map (flip S.Extractor Nothing . uncurry toTxtValue)
+    mkExtrs = map (flip S.Extractor Nothing . toTxtValue)
     mkValidationSel vars =
       S.mkSelect { S.selExtr = mkExtrs vars }
     runTx' tx = do
@@ -34,7 +34,7 @@ import Hasura.RQL.Types
 import Hasura.SQL.Types
 import Hasura.SQL.Value

-type PlanVariables = Map.HashMap G.Variable (Int, PGScalarType)
+type PlanVariables = Map.HashMap G.Variable (Int, PGColumnType)
 type PrepArgMap = IntMap.IntMap Q.PrepArg

 data PGPlan
@@ -63,7 +63,7 @@ instance J.ToJSON RootFieldPlan where
     RFPRaw encJson -> J.toJSON $ TBS.fromBS encJson
     RFPPostgres pgPlan -> J.toJSON pgPlan

-type VariableTypes = Map.HashMap G.Variable PGScalarType
+type VariableTypes = Map.HashMap G.Variable PGColumnType

 data QueryPlan
   = QueryPlan
@@ -116,9 +116,9 @@ withPlan usrVars (PGPlan q reqVars prepMap) annVars = do
   where
     getVar accum (var, (prepNo, _)) = do
       let varName = G.unName $ G.unVariable var
-      (_, colVal) <- onNothing (Map.lookup var annVars) $
+      colVal <- onNothing (Map.lookup var annVars) $
         throw500 $ "missing variable in annVars : " <> varName
-      let prepVal = binEncoder colVal
+      let prepVal = toBinaryValue colVal
       return $ IntMap.insert prepNo prepVal accum

 -- turn the current plan into a transaction
@@ -156,7 +156,7 @@ initPlanningSt =

 getVarArgNum
   :: (MonadState PlanningSt m)
-  => G.Variable -> PGScalarType -> m Int
+  => G.Variable -> PGColumnType -> m Int
 getVarArgNum var colTy = do
   PlanningSt curArgNum vars prepped <- get
   case Map.lookup var vars of
@@ -190,15 +190,15 @@ prepareWithPlan = \case
     argNum <- case (varM, isNullable) of
       (Just var, False) -> getVarArgNum var colTy
       _                 -> getNextArgNum
-    addPrepArg argNum $ binEncoder colVal
-    return $ toPrepParam argNum colTy
+    addPrepArg argNum $ toBinaryValue colVal
+    return $ toPrepParam argNum (pstType colVal)
   R.UVSessVar ty sessVar -> do
     let sessVarVal =
           S.SEOpApp (S.SQLOp "->>")
           [S.SEPrep 1, S.SELit $ T.toLower sessVar]
     return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of
-      PgTypeSimple colTy -> withGeoVal colTy sessVarVal
-      PgTypeArray _      -> sessVarVal
+      PGTypeSimple colTy -> withGeoVal colTy sessVarVal
+      PGTypeArray _      -> sessVarVal
   R.UVSQL sqlExp -> return sqlExp

 queryRootName :: Text
@@ -61,8 +61,8 @@ resolveVal userInfo = \case
   RS.UVSessVar ty sessVar -> do
     sessVarVal <- S.SELit <$> getSessVarVal userInfo sessVar
     return $ flip S.SETyAnn (S.mkTypeAnn ty) $ case ty of
-      PgTypeSimple colTy -> withGeoVal colTy sessVarVal
-      PgTypeArray _      -> sessVarVal
+      PGTypeSimple colTy -> withGeoVal colTy sessVarVal
+      PGTypeArray _      -> sessVarVal
   RS.UVSQL sqlExp -> return sqlExp

 getSessVarVal
@@ -22,7 +22,7 @@ type OpExp = OpExpG UnresolvedVal

 parseOpExps
   :: (MonadError QErr m)
-  => PGScalarType -> AnnInpVal -> m [OpExp]
+  => PGColumnType -> AnnInpVal -> m [OpExp]
 parseOpExps colTy annVal = do
   opExpsM <- flip withObjectM annVal $ \nt objM -> forM objM $ \obj ->
     forM (OMap.toList obj) $ \(k, v) ->
@@ -56,8 +56,8 @@ parseOpExps colTy annVal = do
       "_contained_in" -> fmap AContainedIn <$> asOpRhs v
       "_has_key"      -> fmap AHasKey <$> asOpRhs v

-      "_has_keys_any" -> fmap AHasKeysAny <$> asPGArray PGText v
-      "_has_keys_all" -> fmap AHasKeysAll <$> asPGArray PGText v
+      "_has_keys_any" -> fmap AHasKeysAny <$> asPGArray (PGColumnScalar PGText) v
+      "_has_keys_all" -> fmap AHasKeysAll <$> asPGArray (PGColumnScalar PGText) v

       -- geometry/geography type related operators
       "_st_contains"  -> fmap ASTContains <$> asOpRhs v
@@ -77,13 +77,18 @@ parseOpExps colTy annVal = do
         <> showName k
   return $ catMaybes $ fromMaybe [] opExpsM
   where
-    asOpRhs = fmap (fmap UVPG) . asPGColValM
+    asOpRhs = fmap (fmap UVPG) . asPGColumnValueM

     asPGArray rhsTy v = do
-      valsM <- parseMany asPGColVal v
+      valsM <- parseMany asPGColumnValue v
       forM valsM $ \vals -> do
-        let arrayExp = S.SEArray $ map (txtEncoder . _apvValue) vals
-        return $ UVSQL $ S.SETyAnn arrayExp $ S.mkTypeAnn $ PgTypeArray rhsTy
+        let arrayExp = S.SEArray $ map (txtEncoder . pstValue . _apvValue) vals
+        return $ UVSQL $ S.SETyAnn arrayExp $ S.mkTypeAnn $
+          -- Safe here because asPGColumnValue ensured all the values are of the right type, but if the
+          -- list is empty, we don’t actually have a scalar type to use, so we need to use
+          -- unsafePGColumnToRepresentation to create it. (It would be nice to refactor things to
+          -- somehow get rid of this.)
+          PGTypeArray (unsafePGColumnToRepresentation rhsTy)

     resolveIsNull v = case _aivValue v of
       AGScalar _ Nothing -> return Nothing
@@ -95,18 +100,18 @@ parseOpExps colTy annVal = do
     parseAsSTDWithinObj obj = do
      distanceVal <- onNothing (OMap.lookup "distance" obj) $
        throw500 "expected \"distance\" input field in st_d_within"
-      dist <- UVPG <$> asPGColVal distanceVal
+      dist <- UVPG <$> asPGColumnValue distanceVal
      fromVal <- onNothing (OMap.lookup "from" obj) $
        throw500 "expected \"from\" input field in st_d_within"
-      from <- UVPG <$> asPGColVal fromVal
+      from <- UVPG <$> asPGColumnValue fromVal
      case colTy of
-        PGGeography -> do
+        PGColumnScalar PGGeography -> do
          useSpheroidVal <-
            onNothing (OMap.lookup "use_spheroid" obj) $
            throw500 "expected \"use_spheroid\" input field in st_d_within"
-          useSpheroid <- UVPG <$> asPGColVal useSpheroidVal
+          useSpheroid <- UVPG <$> asPGColumnValue useSpheroidVal
          return $ ASTDWithinGeog $ DWithinGeogOp dist from useSpheroid
-        PGGeometry ->
+        PGColumnScalar PGGeometry ->
          return $ ASTDWithinGeom $ DWithinGeomOp dist from
        _ -> throw500 "expected PGGeometry/PGGeography column for st_d_within"

@@ -117,7 +122,7 @@ parseCastExpression =
   withObjectM $ \_ objM -> forM objM $ \obj -> do
     targetExps <- forM (OMap.toList obj) $ \(targetTypeName, castedComparisonExpressionInput) -> do
       let targetType = txtToPgColTy $ G.unName targetTypeName
-      castedComparisonExpressions <- parseOpExps targetType castedComparisonExpressionInput
+      castedComparisonExpressions <- parseOpExps (PGColumnScalar targetType) castedComparisonExpressionInput
       return (targetType, castedComparisonExpressions)
     return $ Map.fromList targetExps
@@ -112,10 +112,8 @@ withArgM args arg f = prependArgsInPath $ nameAsPath arg $

 type PrepArgs = Seq.Seq Q.PrepArg

-prepare
-  :: (MonadState PrepArgs m) => AnnPGVal -> m S.SQLExp
-prepare (AnnPGVal _ _ colTy colVal) =
-  prepareColVal colTy colVal
+prepare :: (MonadState PrepArgs m) => AnnPGVal -> m S.SQLExp
+prepare (AnnPGVal _ _ _ scalarValue) = prepareColVal scalarValue

 resolveValPrep
   :: (MonadState PrepArgs m)
@@ -136,15 +134,14 @@ withPrepArgs m = runStateT m Seq.empty

 prepareColVal
   :: (MonadState PrepArgs m)
-  => PGScalarType -> PGColValue -> m S.SQLExp
-prepareColVal colTy colVal = do
+  => PGScalarTyped PGColValue -> m S.SQLExp
+prepareColVal (PGScalarTyped scalarType colVal) = do
   preparedArgs <- get
   put (preparedArgs Seq.|> binEncoder colVal)
-  return $ toPrepParam (Seq.length preparedArgs + 1) colTy
+  return $ toPrepParam (Seq.length preparedArgs + 1) scalarType

 txtConverter :: Applicative f => AnnPGVal -> f S.SQLExp
-txtConverter (AnnPGVal _ _ a b) =
-  pure $ toTxtValue a b
+txtConverter (AnnPGVal _ _ _ scalarValue) = pure $ toTxtValue scalarValue

 withSelSet :: (Monad m) => SelSet -> (Field -> m a) -> m [(Text, a)]
 withSelSet selSet f =
@@ -1,8 +1,9 @@
 module Hasura.GraphQL.Resolve.InputValue
   ( withNotNull
   , tyMismatch
-  , asPGColValM
-  , asPGColVal
+  , asPGColumnTypeAndValueM
+  , asPGColumnValueM
+  , asPGColumnValue
   , asEnumVal
   , asEnumValM
   , withObject
@@ -21,10 +22,12 @@ import Hasura.Prelude

 import qualified Language.GraphQL.Draft.Syntax as G

+import qualified Hasura.RQL.Types              as RQL
+
 import Hasura.GraphQL.Resolve.Context
 import Hasura.GraphQL.Validate.Types
 import Hasura.RQL.Types
-import Hasura.SQL.Types ((<>>))
+import Hasura.SQL.Types
 import Hasura.SQL.Value

 withNotNull
@@ -41,41 +44,43 @@ tyMismatch expectedTy v =
     getAnnInpValKind (_aivValue v) <> " for value of type " <>
     G.showGT (_aivType v)

-asPGColValM
+asPGColumnTypeAndValueM
   :: (MonadError QErr m)
-  => AnnInpVal -> m (Maybe AnnPGVal)
-asPGColValM annInpVal = case val of
-  AGScalar colTy valM ->
-    return $ fmap (AnnPGVal varM (G.isNullable ty) colTy) valM
-  _ ->
-    tyMismatch "pgvalue" annInpVal
-  where
-    AnnInpVal ty varM val = annInpVal
-
-asPGColVal
-  :: (MonadError QErr m)
-  => AnnInpVal -> m AnnPGVal
-asPGColVal v = case _aivValue v of
-  AGScalar colTy (Just val) ->
-    return $ AnnPGVal (_aivVariable v) (G.isNullable (_aivType v)) colTy val
-  AGScalar colTy Nothing -> throw500 $ "unexpected null for ty " <>> colTy
+  => AnnInpVal
+  -> m (PGColumnType, PGScalarTyped (Maybe PGColValue))
+asPGColumnTypeAndValueM v = case _aivValue v of
+  AGScalar colTy val -> pure (PGColumnScalar colTy, PGScalarTyped colTy val)
+  AGEnum _ (AGEReference reference maybeValue) -> do
+    let maybeScalarValue = PGValText . RQL.getEnumValue <$> maybeValue
+    pure (PGColumnEnumReference reference, PGScalarTyped PGText maybeScalarValue)
   _ -> tyMismatch "pgvalue" v

-asEnumVal
-  :: (MonadError QErr m)
-  => AnnInpVal -> m (G.NamedType, G.EnumValue)
-asEnumVal v = case _aivValue v of
-  AGEnum ty (Just val) -> return (ty, val)
-  AGEnum ty Nothing ->
-    throw500 $ "unexpected null for ty " <> showNamedTy ty
-  _ -> tyMismatch "enum" v
+asPGColumnTypeAndAnnValueM :: (MonadError QErr m) => AnnInpVal -> m (PGColumnType, Maybe AnnPGVal)
+asPGColumnTypeAndAnnValueM v = do
+  (columnType, scalarValueM) <- asPGColumnTypeAndValueM v
+  let mkAnnPGColVal = AnnPGVal (_aivVariable v) (G.isNullable (_aivType v)) columnType
+  pure (columnType, mkAnnPGColVal <$> sequence scalarValueM)

-asEnumValM
-  :: (MonadError QErr m)
-  => AnnInpVal -> m (G.NamedType, Maybe G.EnumValue)
+asPGColumnValueM :: (MonadError QErr m) => AnnInpVal -> m (Maybe AnnPGVal)
+asPGColumnValueM = fmap snd . asPGColumnTypeAndAnnValueM
+
+asPGColumnValue :: (MonadError QErr m) => AnnInpVal -> m AnnPGVal
+asPGColumnValue v = do
+  (columnType, annPGValM) <- asPGColumnTypeAndAnnValueM v
+  onNothing annPGValM $ throw500 ("unexpected null for type " <>> columnType)
+
+-- | Note: only handles “synthetic” enums (see 'EnumValuesInfo'). Enum table references are handled
+-- by 'asPGColumnTypeAndValueM' and its variants.
+asEnumVal :: (MonadError QErr m) => AnnInpVal -> m (G.NamedType, G.EnumValue)
+asEnumVal = asEnumValM >=> \case
+  (ty, Just val) -> pure (ty, val)
+  (ty, Nothing)  -> throw500 $ "unexpected null for ty " <> showNamedTy ty
+
+-- | Like 'asEnumVal', only handles “synthetic” enums.
+asEnumValM :: (MonadError QErr m) => AnnInpVal -> m (G.NamedType, Maybe G.EnumValue)
 asEnumValM v = case _aivValue v of
-  AGEnum ty valM -> return (ty, valM)
-  _ -> tyMismatch "enum" v
+  AGEnum ty (AGESynthetic valM) -> return (ty, valM)
+  _ -> tyMismatch "enum" v

 withObject
   :: (MonadError QErr m)
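To make the new code path concrete, here is how a value for an enum-table column flows through asPGColumnTypeAndValueM, sketched as comments; the payload shapes are taken from the hunk above, while the concrete example value is hypothetical:

-- Input from the validator, e.g. for a "direction" enum-table column:
--   AGEnum ty (AGEReference reference (Just enumValue))
-- Output of asPGColumnTypeAndValueM:
--   ( PGColumnEnumReference reference
--   , PGScalarTyped PGText (Just (PGValText (getEnumValue enumValue))) )
-- The enum value is handed to Postgres as plain text, while the column type
-- remembers which enum table constrains it.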
@@ -144,12 +149,12 @@ asPGColText
   :: (MonadError QErr m)
   => AnnInpVal -> m Text
 asPGColText val = do
-  pgColVal <- _apvValue <$> asPGColVal val
+  pgColVal <- pstValue . _apvValue <$> asPGColumnValue val
   onlyText pgColVal

 asPGColTextM
   :: (MonadError QErr m)
   => AnnInpVal -> m (Maybe Text)
 asPGColTextM val = do
-  pgColValM <- fmap _apvValue <$> asPGColValM val
+  pgColValM <- fmap (pstValue . _apvValue) <$> asPGColumnValueM val
   mapM onlyText pgColValM
@@ -5,7 +5,6 @@ where
 import Data.Has
 import Hasura.EncJSON
 import Hasura.Prelude
-import Hasura.Server.Utils

 import qualified Data.Aeson                    as J
 import qualified Data.Aeson.Casing             as J
@@ -19,7 +18,6 @@ import qualified Language.GraphQL.Draft.Syntax as G
 import qualified Database.PG.Query             as Q
 import qualified Hasura.RQL.DML.Insert         as RI
 import qualified Hasura.RQL.DML.Returning      as RR
-import qualified Hasura.RQL.GBoolExp           as RB

 import qualified Hasura.SQL.DML                as S

@@ -71,7 +69,7 @@ data RelIns a
 type ObjRelIns = RelIns SingleObjIns
 type ArrRelIns = RelIns MultiObjIns

-type PGColWithValue = (PGCol, PGColValue)
+type PGColWithValue = (PGCol, PGScalarTyped PGColValue)

 data CTEExp
   = CTEExp
@@ -81,7 +79,7 @@ data CTEExp

 data AnnInsObj
   = AnnInsObj
-  { _aioColumns :: ![(PGCol, PGScalarType, PGColValue)]
+  { _aioColumns :: ![PGColWithValue]
   , _aioObjRels :: ![ObjRelIns]
   , _aioArrRels :: ![ArrRelIns]
   } deriving (Show, Eq)
@@ -104,12 +102,17 @@ traverseInsObj
   -> m AnnInsObj
 traverseInsObj rim (gName, annVal) defVal@(AnnInsObj cols objRels arrRels) =
   case _aivValue annVal of
-    AGScalar colty mColVal -> do
-      let col = PGCol $ G.unName gName
-          colVal = fromMaybe (PGNull colty) mColVal
-      return (AnnInsObj ((col, colty, colVal):cols) objRels arrRels)
+    AGScalar{} -> parseValue
+    AGEnum{} -> parseValue
+    _ -> parseObject
+  where
+    parseValue = do
+      (_, PGScalarTyped scalarType maybeScalarValue) <- asPGColumnTypeAndValueM annVal
+      let columnName = PGCol $ G.unName gName
+          scalarValue = fromMaybe (PGNull scalarType) maybeScalarValue
+      pure $ AnnInsObj ((columnName, PGScalarTyped scalarType scalarValue):cols) objRels arrRels

-    _ -> do
+    parseObject = do
       objM <- asObjectM annVal
       -- if relational insert input is 'null' then ignore
       -- return default value
@@ -124,8 +127,7 @@ traverseInsObj rim (gName, annVal) defVal@(AnnInsObj cols objRels arrRels) =

       let rTable = riRTable relInfo
       InsCtx rtView rtCols rtDefVals rtRelInfoMap rtUpdPerm <- getInsCtx rTable
-      rtDefValsRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting)
-                      rtDefVals
+      rtDefValsRes <- mapM (convPartialSQLExp sessVarFromCurrentSetting) rtDefVals

       withPathK (G.unName gName) $ case riType relInfo of
         ObjRel -> do
@@ -185,11 +187,11 @@ parseOnConflict tn updFiltrM val = withPathK "on_conflict" $

 toSQLExps
   :: (MonadError QErr m, MonadState PrepArgs m)
-  => [(PGCol, PGScalarType, PGColValue)]
+  => [PGColWithValue]
   -> m [(PGCol, S.SQLExp)]
 toSQLExps cols =
-  forM cols $ \(c, ty, v) -> do
-    prepExp <- prepareColVal ty v
+  forM cols $ \(c, v) -> do
+    prepExp <- prepareColVal v
     return (c, prepExp)

 mkSQLRow :: Map.HashMap PGCol S.SQLExp -> [(PGCol, S.SQLExp)] -> [S.SQLExp]
@@ -200,7 +202,7 @@ mkInsertQ
   :: MonadError QErr m
   => QualifiedTable
   -> Maybe RI.ConflictClauseP1
-  -> [(PGCol, PGScalarType, PGColValue)]
+  -> [PGColWithValue]
   -> [PGCol]
   -> Map.HashMap PGCol S.SQLExp
   -> RoleName
@@ -232,13 +234,13 @@ fetchFromColVals
   => ColVals
   -> [PGColInfo]
   -> (PGColInfo -> a)
-  -> m [(a, PGColValue)]
+  -> m [(a, PGScalarTyped PGColValue)]
 fetchFromColVals colVal reqCols f =
   forM reqCols $ \ci -> do
     let valM = Map.lookup (pgiName ci) colVal
     val <- onNothing valM $ throw500 $ "column "
            <> pgiName ci <<> " not found in given colVal"
-    pgColVal <- RB.pgValParser (pgiType ci) val
+    pgColVal <- parsePGScalarValue (pgiType ci) val
     return (f ci, pgColVal)

 mkSelCTE
@@ -365,7 +367,7 @@ insertObj
   -> Q.TxE QErr (Int, CTEExp)
 insertObj strfyNum role tn singleObjIns addCols = do
   -- validate insert
-  validateInsert (map _1 cols) (map _riRelInfo objRels) $ map fst addCols
+  validateInsert (map fst cols) (map _riRelInfo objRels) $ map fst addCols

   -- insert all object relations and fetch this insert dependent column values
   objInsRes <- forM objRels $ insertObjRel strfyNum role
@@ -373,9 +375,7 @@ insertObj strfyNum role tn singleObjIns addCols = do
   -- prepare final insert columns
   let objRelAffRows = sum $ map fst objInsRes
       objRelDeterminedCols = concatMap snd objInsRes
-      objRelInsCols = mkPGColWithTypeAndVal allCols objRelDeterminedCols
-      addInsCols = mkPGColWithTypeAndVal allCols addCols
-      finalInsCols = cols <> objRelInsCols <> addInsCols
+      finalInsCols = cols <> objRelDeterminedCols <> addCols

   -- prepare insert query as with expression
   (CTEExp cte insPArgs, ccM) <-
@@ -435,10 +435,9 @@ insertMultipleObjects strfyNum role tn multiObjIns addCols mutFlds errP =
   -- insert all column rows at one go
   withoutRelsInsert = withErrPath $ do
     indexedForM_ insCols $ \insCol ->
-      validateInsert (map _1 insCol) [] $ map fst addCols
+      validateInsert (map fst insCol) [] $ map fst addCols

-    let addColsWithType = mkPGColWithTypeAndVal tableColInfos addCols
-        withAddCols = flip map insCols $ union addColsWithType
+    let withAddCols = flip map insCols $ union addCols
         tableCols = map pgiName tableColInfos

     (sqlRows, prepArgs) <- flip runStateT Seq.Empty $ do
@@ -533,10 +532,3 @@ mergeListsWith [] _ _ _ = []
 mergeListsWith (x:xs) l b f = case find (b x) l of
   Nothing -> mergeListsWith xs l b f
   Just y  -> f x y : mergeListsWith xs l b f
-
-mkPGColWithTypeAndVal :: [PGColInfo] -> [PGColWithValue]
-                      -> [(PGCol, PGScalarType, PGColValue)]
-mkPGColWithTypeAndVal pgColInfos pgColWithVal =
-  mergeListsWith pgColInfos pgColWithVal
-    (\ci (c, _) -> pgiName ci == c)
-    (\ci (c, v) -> (c, pgiType ci, v))
@@ -6,19 +6,20 @@ module Hasura.GraphQL.Resolve.Introspect
 import Data.Has
 import Hasura.Prelude

-import qualified Data.Aeson as J
-import qualified Data.HashMap.Strict as Map
-import qualified Data.HashSet as Set
-import qualified Data.Text as T
-import qualified Language.GraphQL.Draft.Syntax as G
+import qualified Data.Aeson                    as J
+import qualified Data.HashMap.Strict           as Map
+import qualified Data.HashSet                  as Set
+import qualified Data.Text                     as T
+import qualified Language.GraphQL.Draft.Syntax as G

 import Hasura.GraphQL.Resolve.Context
 import Hasura.GraphQL.Resolve.InputValue
-import Hasura.GraphQL.Validate.InputValue
 import Hasura.GraphQL.Validate.Context
 import Hasura.GraphQL.Validate.Field
+import Hasura.GraphQL.Validate.InputValue
 import Hasura.GraphQL.Validate.Types
 import Hasura.RQL.Types
 import Hasura.SQL.Types
+import Hasura.SQL.Value

 data TypeKind
@@ -163,7 +164,7 @@ enumTypeR (EnumTyInfo descM n vals _) fld =
     "name" -> retJ $ namedTyToTxt n
     "description" -> retJ $ fmap G.unDescription descM
     "enumValues" -> fmap J.toJSON $ mapM (enumValueR subFld) $
-      sortOn _eviVal $ Map.elems vals
+      sortOn _eviVal $ Map.elems (normalizeEnumValues vals)
     _ -> return J.Null

 -- 4.5.2.6
@@ -339,7 +340,7 @@ typeR
   => Field -> m J.Value
 typeR fld = do
   name <- withArg args "name" $ \arg -> do
-    pgColVal <- _apvValue <$> asPGColVal arg
+    pgColVal <- pstValue . _apvValue <$> asPGColumnValue arg
     case pgColVal of
       PGValText t -> return t
       _           -> throw500 "expecting string for name arg of __type"
@@ -18,8 +18,8 @@ import qualified Hasura.RQL.DML.Delete as RD
 import qualified Hasura.RQL.DML.Returning as RR
 import qualified Hasura.RQL.DML.Update    as RU

-import qualified Hasura.SQL.DML           as S
 import qualified Hasura.RQL.DML.Select    as RS
+import qualified Hasura.SQL.DML           as S

 import Hasura.EncJSON
 import Hasura.GraphQL.Resolve.BoolExp
@@ -60,7 +60,7 @@ convertRowObj
 convertRowObj val =
   flip withObject val $ \_ obj ->
     forM (OMap.toList obj) $ \(k, v) -> do
-      prepExpM <- fmap UVPG <$> asPGColValM v
+      prepExpM <- fmap UVPG <$> asPGColumnValueM v
       let prepExp = fromMaybe (UVSQL $ S.SEUnsafe "NULL") prepExpM
       return (PGCol $ G.unName k, prepExp)

@@ -83,7 +83,7 @@ convObjWithOp
   => ApplySQLOp -> AnnInpVal -> m [(PGCol, UnresolvedVal)]
 convObjWithOp opFn val =
   flip withObject val $ \_ obj -> forM (OMap.toList obj) $ \(k, v) -> do
-    colVal <- _apvValue <$> asPGColVal v
+    colVal <- pstValue . _apvValue <$> asPGColumnValue v
     let pgCol = PGCol $ G.unName k
         -- TODO: why are we using txtEncoder here?
         encVal = txtEncoder colVal
@@ -95,8 +95,8 @@ convDeleteAtPathObj
   => AnnInpVal -> m [(PGCol, UnresolvedVal)]
 convDeleteAtPathObj val =
   flip withObject val $ \_ obj -> forM (OMap.toList obj) $ \(k, v) -> do
-    vals <- flip withArray v $ \_ annVals -> mapM asPGColVal annVals
-    let valExps = map (txtEncoder . _apvValue) vals
+    vals <- flip withArray v $ \_ annVals -> mapM asPGColumnValue annVals
+    let valExps = map (txtEncoder . pstValue . _apvValue) vals
         pgCol = PGCol $ G.unName k
         annEncVal = S.SETyAnn (S.SEArray valExps) S.textArrTypeAnn
         sqlExp = S.SEOpApp S.jsonbDeleteAtPathOp
@@ -115,7 +115,7 @@ parseTableArgs args = do
   ordByExpML <- withArgM args "order_by" parseOrderBy
   let ordByExpM = NE.nonEmpty =<< ordByExpML
   limitExpM <- withArgM args "limit" parseLimit
-  offsetExpM <- withArgM args "offset" $ asPGColVal >=> txtConverter
+  offsetExpM <- withArgM args "offset" $ asPGColumnValue >=> txtConverter
   distOnColsML <- withArgM args "distinct_on" parseColumns
   let distOnColsM = NE.nonEmpty =<< distOnColsML
   mapM_ (validateDistOn ordByExpM) distOnColsM
@@ -255,7 +255,7 @@ parseOrderByEnum = \case

 parseLimit :: ( MonadError QErr m ) => AnnInpVal -> m Int
 parseLimit v = do
-  pgColVal <- _apvValue <$> asPGColVal v
+  pgColVal <- pstValue . _apvValue <$> asPGColumnValue v
   limit <- maybe noIntErr return $ pgColValueToInt pgColVal
   -- validate int value
   onlyPositiveInt limit
@@ -273,7 +273,7 @@ pgColValToBoolExp
 pgColValToBoolExp colArgMap colValMap = do
   colExps <- forM colVals $ \(name, val) ->
     BoolFld <$> do
-      opExp <- AEQ True . UVPG <$> asPGColVal val
+      opExp <- AEQ True . UVPG <$> asPGColumnValue val
       colInfo <- onNothing (Map.lookup name colArgMap) $
         throw500 $ "column name " <> showName name
         <> " not found in column arguments map"
@@ -341,7 +341,7 @@ convertCount args = do
   maybe (return S.CTStar) (mkCType isDistinct) columnsM
   where
     parseDistinct v = do
-      val <- _apvValue <$> asPGColVal v
+      val <- pstValue . _apvValue <$> asPGColumnValue v
      case val of
        PGValBoolean b -> return b
        _              ->
@@ -417,7 +417,7 @@ parseFunctionArgs
 parseFunctionArgs argSeq val = fmap catMaybes $
   flip withObject val $ \_ obj ->
     fmap toList $ forM argSeq $ \(FuncArgItem argName) ->
-      forM (OMap.lookup argName obj) $ fmap (maybe nullSQL UVPG) . asPGColValM
+      forM (OMap.lookup argName obj) $ fmap (maybe nullSQL UVPG) . asPGColumnValueM
   where
     nullSQL = UVSQL $ S.SEUnsafe "NULL"
@@ -8,6 +8,7 @@ import qualified Data.Text as T
 import qualified Language.GraphQL.Draft.Syntax as G

 import Hasura.RQL.Types.BoolExp
+import Hasura.RQL.Types.Column
 import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.Permission
 import Hasura.SQL.Types
@@ -141,8 +142,12 @@ data AnnPGVal
   = AnnPGVal
   { _apvVariable   :: !(Maybe G.Variable)
   , _apvIsNullable :: !Bool
-  , _apvType       :: !PGScalarType
-  , _apvValue      :: !PGColValue
+  , _apvType       :: !PGColumnType
+  -- ^ Note: '_apvValue' is a @'PGScalarTyped' 'PGColValue'@, so it includes its type as a
+  -- 'PGScalarType'. However, we /also/ need to keep the original 'PGColumnType' information around
+  -- in case we need to re-parse a new value with its type because we’re reusing a cached query
+  -- plan.
+  , _apvValue      :: !(PGScalarTyped PGColValue)
   } deriving (Show, Eq)

 type PrepFn m = AnnPGVal -> m S.SQLExp
@@ -156,7 +161,7 @@ partialSQLExpToUnresolvedVal = \case
 -- A value that will be converted to an sql expression eventually
 data UnresolvedVal
   -- From a session variable
-  = UVSessVar !PgType !SessVar
+  = UVSessVar !(PGType PGScalarType) !SessVar
   -- This is postgres
   | UVPG !AnnPGVal
   -- This is a full resolved sql expression
@@ -17,6 +17,7 @@ module Hasura.GraphQL.Schema
   , checkSchemaConflicts
   ) where

+import Control.Lens.Extended hiding (op)

 import qualified Data.HashMap.Strict as Map
 import qualified Data.HashSet        as Set
@@ -44,16 +45,16 @@ import Hasura.GraphQL.Schema.OrderBy
 import Hasura.GraphQL.Schema.Select
 import Hasura.GraphQL.Schema.Merge

-getInsPerm :: TableInfo -> RoleName -> Maybe InsPermInfo
+getInsPerm :: TableInfo PGColInfo -> RoleName -> Maybe InsPermInfo
 getInsPerm tabInfo role
   | role == adminRole = _permIns $ mkAdminRolePermInfo tabInfo
   | otherwise = Map.lookup role rolePermInfoMap >>= _permIns
   where
-    rolePermInfoMap = tiRolePermInfoMap tabInfo
+    rolePermInfoMap = _tiRolePermInfoMap tabInfo

 getTabInfo
   :: MonadError QErr m
-  => TableCache -> QualifiedTable -> m TableInfo
+  => TableCache PGColInfo -> QualifiedTable -> m (TableInfo PGColInfo)
 getTabInfo tc t =
   onNothing (Map.lookup t tc) $
     throw500 $ "table not found: " <>> t
@@ -67,32 +68,32 @@ isValidCol = isValidName . G.Name . getPGColTxt
 isValidRel :: ToTxt a => RelName -> QualifiedObject a -> Bool
 isValidRel rn rt = isValidName (mkRelName rn) && isValidObjectName rt

-isValidField :: FieldInfo -> Bool
+isValidField :: FieldInfo PGColInfo -> Bool
 isValidField = \case
   FIColumn (PGColInfo col _ _) -> isValidCol col
   FIRelationship (RelInfo rn _ _ remTab _) -> isValidRel rn remTab

 upsertable :: [ConstraintName] -> Bool -> Bool -> Bool
-upsertable uniqueOrPrimaryCons isUpsertAllowed view =
-  not (null uniqueOrPrimaryCons) && isUpsertAllowed && not view
+upsertable uniqueOrPrimaryCons isUpsertAllowed isAView =
+  not (null uniqueOrPrimaryCons) && isUpsertAllowed && not isAView

-toValidFieldInfos :: FieldInfoMap -> [FieldInfo]
+toValidFieldInfos :: FieldInfoMap PGColInfo -> [FieldInfo PGColInfo]
 toValidFieldInfos = filter isValidField . Map.elems

-validPartitionFieldInfoMap :: FieldInfoMap -> ([PGColInfo], [RelInfo])
+validPartitionFieldInfoMap :: FieldInfoMap PGColInfo -> ([PGColInfo], [RelInfo])
 validPartitionFieldInfoMap = partitionFieldInfos . toValidFieldInfos

-getValidCols :: FieldInfoMap -> [PGColInfo]
+getValidCols :: FieldInfoMap PGColInfo -> [PGColInfo]
 getValidCols = fst . validPartitionFieldInfoMap

-getValidRels :: FieldInfoMap -> [RelInfo]
+getValidRels :: FieldInfoMap PGColInfo -> [RelInfo]
 getValidRels = snd . validPartitionFieldInfoMap

 mkValidConstraints :: [ConstraintName] -> [ConstraintName]
 mkValidConstraints =
   filter (isValidName . G.Name . getConstraintTxt)

-isRelNullable :: FieldInfoMap -> RelInfo -> Bool
+isRelNullable :: FieldInfoMap PGColInfo -> RelInfo -> Bool
 isRelNullable fim ri = isNullable
   where
     lCols = map fst $ riMapping ri
@@ -113,24 +114,26 @@ isAggFld = flip elem (numAggOps <> compAggOps)

 mkGCtxRole'
   :: QualifiedTable
-  -- insert permission
   -> Maybe ([PGColInfo], RelationInfoMap)
-  -- select permission
+  -- ^ insert permission
   -> Maybe (Bool, [SelField])
-  -- update cols
+  -- ^ select permission
   -> Maybe [PGColInfo]
-  -- delete cols
+  -- ^ update cols
   -> Maybe ()
-  -- primary key columns
+  -- ^ delete cols
   -> [PGColInfo]
-  -- constraints
+  -- ^ primary key columns
   -> [ConstraintName]
+  -- ^ constraints
   -> Maybe ViewInfo
-  -- all functions
   -> [FunctionInfo]
+  -- ^ all functions
+  -> Maybe EnumValues
+  -- ^ present iff this table is an enum table (see "Hasura.RQL.Schema.Enum")
   -> TyAgg
 mkGCtxRole' tn insPermM selPermM updColsM
-  delPermM pkeyCols constraints viM funcs =
+  delPermM pkeyCols constraints viM funcs enumValuesM =

   TyAgg (mkTyInfoMap allTypes) fieldMap scalars ordByCtx
   where
@@ -163,6 +166,7 @@ mkGCtxRole' tn insPermM selPermM updColsM
       , TIInpObj <$> mutHelper viIsUpdatable updIncInpObjM
       , TIObj <$> mutRespObjM
       , TIEnum <$> selColInpTyM
+      , TIEnum <$> tableEnumTypeM
       ]

     mutHelper :: (ViewInfo -> Bool) -> Maybe a -> Maybe a
@@ -172,7 +176,7 @@ mkGCtxRole' tn insPermM selPermM updColsM
       [ insInpObjFldsM, updSetInpObjFldsM
       , boolExpInpObjFldsM , selObjFldsM
       ]
-    scalars = Set.unions [selByPkScalarSet, funcArgScalarSet]
+    scalars = selByPkScalarSet <> funcArgScalarSet

     -- helper
     mkColFldMap ty cols = Map.fromList $ flip map cols $
@@ -210,8 +214,7 @@ mkGCtxRole' tn insPermM selPermM updColsM
     -- funcargs input type
     funcArgInpObjs = mapMaybe mkFuncArgsInp funcs
     -- funcArgCtx = Map.unions funcArgCtxs
-    funcArgScalarSet = Set.fromList $
-      concatMap (map faType . toList . fiInputArgs) funcs
+    funcArgScalarSet = funcs ^.. folded.to fiInputArgs.folded.to faType

     -- helper
     mkFldMap ty = Map.fromList . concatMap (mkFld ty)
@@ -260,32 +263,36 @@ mkGCtxRole' tn insPermM selPermM updColsM
     getCompCols = onlyComparableCols . lefts
     onlyFloat = const $ mkScalarTy PGFloat

-    mkTypeMaker "sum" = mkScalarTy
+    mkTypeMaker "sum" = mkColumnType
     mkTypeMaker _ = onlyFloat

     mkColAggFldsObjs flds =
       let numCols = getNumCols flds
           compCols = getCompCols flds
           mkNumObjFld n = mkTableColAggFldsObj tn n (mkTypeMaker n) numCols
-          mkCompObjFld n = mkTableColAggFldsObj tn n mkScalarTy compCols
+          mkCompObjFld n = mkTableColAggFldsObj tn n mkColumnType compCols
           numFldsObjs = bool (map mkNumObjFld numAggOps) [] $ null numCols
           compFldsObjs = bool (map mkCompObjFld compAggOps) [] $ null compCols
       in numFldsObjs <> compFldsObjs
     -- the fields used in table object
     selObjFldsM = mkFldMap (mkTableTy tn) <$> selFldsM
     -- the scalar set for table_by_pk arguments
-    selByPkScalarSet = Set.fromList $ map pgiType pkeyCols
+    selByPkScalarSet = pkeyCols ^.. folded.to pgiType._PGColumnScalar

     ordByInpCtxM = mkOrdByInpObj tn <$> selFldsM
     (ordByInpObjM, ordByCtxM) = case ordByInpCtxM of
       Just (a, b) -> (Just a, Just b)
       Nothing -> (Nothing, Nothing)

+    tableEnumTypeM = enumValuesM <&> \enumValues ->
+      mkHsraEnumTyInfo Nothing (mkTableEnumType tn) $
+        EnumValuesReference (EnumReference tn enumValues)
+
 getRootFldsRole'
   :: QualifiedTable
   -> [PGCol]
   -> [ConstraintName]
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -> [FunctionInfo]
   -> Maybe ([T.Text], Bool) -- insert perm
   -> Maybe (AnnBoolExpPartialSQL, Maybe Int, [T.Text], Bool) -- select filter
@@ -372,15 +379,15 @@ getRootFldsRole' tn primCols constraints fields funcs insM selM updM delM viM =
     procFuncArgs (fiInputArgs fi) $ \_ t -> FuncArgItem $ G.Name t


-getSelPermission :: TableInfo -> RoleName -> Maybe SelPermInfo
+getSelPermission :: TableInfo PGColInfo -> RoleName -> Maybe SelPermInfo
 getSelPermission tabInfo role =
-  Map.lookup role (tiRolePermInfoMap tabInfo) >>= _permSel
+  Map.lookup role (_tiRolePermInfoMap tabInfo) >>= _permSel

 getSelPerm
   :: (MonadError QErr m)
-  => TableCache
+  => TableCache PGColInfo
   -- all the fields of a table
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -- role and its permission
   -> RoleName -> SelPermInfo
   -> m (Bool, [SelField])
@@ -406,8 +413,8 @@ getSelPerm tableCache fields role selPermInfo = do
 mkInsCtx
   :: MonadError QErr m
   => RoleName
-  -> TableCache
-  -> FieldInfoMap
+  -> TableCache PGColInfo
+  -> FieldInfoMap PGColInfo
   -> InsPermInfo
   -> Maybe UpdPermInfo
   -> m InsCtx
@@ -417,7 +424,7 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do
         relName = riName relInfo
     remoteTableInfo <- getTabInfo tableCache remoteTable
     let insPermM = getInsPerm remoteTableInfo role
-        viewInfoM = tiViewInfo remoteTableInfo
+        viewInfoM = _tiViewInfo remoteTableInfo
     return $ bool Nothing (Just (relName, relInfo)) $
       isInsertable insPermM viewInfoM && isValidRel relName remoteTable

@@ -438,15 +445,15 @@ mkInsCtx role tableCache fields insPermInfo updPermM = do
 mkAdminInsCtx
   :: MonadError QErr m
   => QualifiedTable
-  -> TableCache
-  -> FieldInfoMap
+  -> TableCache PGColInfo
+  -> FieldInfoMap PGColInfo
   -> m InsCtx
 mkAdminInsCtx tn tc fields = do
   relTupsM <- forM rels $ \relInfo -> do
     let remoteTable = riRTable relInfo
         relName = riName relInfo
     remoteTableInfo <- getTabInfo tc remoteTable
-    let viewInfoM = tiViewInfo remoteTableInfo
+    let viewInfoM = _tiViewInfo remoteTableInfo
     return $ bool Nothing (Just (relName, relInfo)) $
       isMutable viIsInsertable viewInfoM && isValidRel relName remoteTable

@@ -461,17 +468,18 @@ mkAdminInsCtx tn tc fields = do

 mkGCtxRole
   :: (MonadError QErr m)
-  => TableCache
+  => TableCache PGColInfo
   -> QualifiedTable
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -> [PGCol]
   -> [ConstraintName]
   -> [FunctionInfo]
   -> Maybe ViewInfo
+  -> Maybe EnumValues
   -> RoleName
   -> RolePermInfo
   -> m (TyAgg, RootFields, InsCtxMap)
-mkGCtxRole tableCache tn fields pCols constraints funcs viM role permInfo = do
+mkGCtxRole tableCache tn fields pCols constraints funcs viM enumValuesM role permInfo = do
   selPermM <- mapM (getSelPerm tableCache fields role) $ _permSel permInfo
   tabInsInfoM <- forM (_permIns permInfo) $ \ipi -> do
     ctx <- mkInsCtx role tableCache fields ipi $ _permUpd permInfo
@@ -482,7 +490,7 @@ mkGCtxRole tableCache tn fields pCols constraints funcs viM enumValuesM role permInfo = do
     insCtxM = fst <$> tabInsInfoM
     updColsM = filterColInfos . upiCols <$> _permUpd permInfo
     tyAgg = mkGCtxRole' tn insPermM selPermM updColsM
-      (void $ _permDel permInfo) pColInfos constraints viM funcs
+      (void $ _permDel permInfo) pColInfos constraints viM funcs enumValuesM
     rootFlds = getRootFldsRole tn pCols constraints fields funcs viM permInfo
     insCtxMap = maybe Map.empty (Map.singleton tn) insCtxM
   return (tyAgg, rootFlds, insCtxMap)
@@ -497,7 +505,7 @@ getRootFldsRole
   :: QualifiedTable
   -> [PGCol]
   -> [ConstraintName]
-  -> FieldInfoMap
+  -> FieldInfoMap PGColInfo
   -> [FunctionInfo]
   -> Maybe ViewInfo
   -> RolePermInfo
@@ -521,21 +529,22 @@ getRootFldsRole tn pCols constraints fields funcs viM (RolePermInfo insM selM up

 mkGCtxMapTable
   :: (MonadError QErr m)
-  => TableCache
+  => TableCache PGColInfo
   -> FunctionCache
-  -> TableInfo
+  -> TableInfo PGColInfo
   -> m (Map.HashMap RoleName (TyAgg, RootFields, InsCtxMap))
 mkGCtxMapTable tableCache funcCache tabInfo = do
   m <- Map.traverseWithKey
-    (mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo) rolePerms
+    (mkGCtxRole tableCache tn fields pkeyCols validConstraints tabFuncs viewInfo enumValues)
+    rolePerms
   adminInsCtx <- mkAdminInsCtx tn tableCache fields
   let adminCtx = mkGCtxRole' tn (Just (colInfos, icRelations adminInsCtx))
         (Just (True, selFlds)) (Just colInfos) (Just ())
-        pkeyColInfos validConstraints viewInfo tabFuncs
+        pkeyColInfos validConstraints viewInfo tabFuncs enumValues
       adminInsCtxMap = Map.singleton tn adminInsCtx
   return $ Map.insert adminRole (adminCtx, adminRootFlds, adminInsCtxMap) m
   where
-    TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _ = tabInfo
+    TableInfo tn _ fields rolePerms constraints pkeyCols viewInfo _ enumValues = tabInfo
     validConstraints = mkValidConstraints constraints
     colInfos = getValidCols fields
     validColNames = map pgiName colInfos
@@ -556,7 +565,7 @@ noFilter = annBoolExpTrue

 mkGCtxMap
   :: (MonadError QErr m)
-  => TableCache -> FunctionCache -> m GCtxMap
+  => TableCache PGColInfo -> FunctionCache -> m GCtxMap
 mkGCtxMap tableCache functionCache = do
   typesMapL <- mapM (mkGCtxMapTable tableCache functionCache) $
     filter tableFltr $ Map.elems tableCache
@@ -564,8 +573,8 @@ mkGCtxMap tableCache functionCache = do
   return $ flip Map.map typesMap $ \(ty, flds, insCtxMap) ->
     mkGCtx ty flds insCtxMap
   where
-    tableFltr ti = not (tiSystemDefined ti)
-                   && isValidObjectName (tiName ti)
+    tableFltr ti = not (_tiSystemDefined ti)
+                   && isValidObjectName (_tiName ti)

 -- | build GraphQL schema from postgres tables and functions
 buildGCtxMapPG
@@ -671,13 +680,13 @@ mkGCtx tyAgg (RootFields queryFields mutationFields) insCtxMap =
   mFlds = rootFieldInfos mutationFields
   rootFieldInfos = map snd . Map.elems

-  anyGeoTypes = any isGeoType colTys
+  anyGeoTypes = any (isScalarColumnWhere isGeoType) colTys
   allComparableTypes =
     if anyGeoTypes
       -- due to casting, we need to generate both geometry and geography
       -- operations even if just one of the two appears in the schema
-      then Set.union (Set.fromList [PGGeometry, PGGeography]) colTys
+      then Set.union (Set.fromList [PGColumnScalar PGGeometry, PGColumnScalar PGGeography]) colTys
      else colTys
-  allScalarTypes = allComparableTypes <> scalars
+  allScalarTypes = (allComparableTypes ^.. folded._PGColumnScalar) <> scalars

   wiredInGeoInputTypes = guard anyGeoTypes *> map TIInpObj geoInputTypes
@ -6,7 +6,6 @@ module Hasura.GraphQL.Schema.BoolExp
, mkBoolExpInp
) where

import qualified Data.Text as T
import qualified Data.HashMap.Strict as Map
import qualified Language.GraphQL.Draft.Syntax as G

@ -16,17 +15,14 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types

addTypeSuffix :: T.Text -> G.NamedType -> G.NamedType
addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name suffix

typeToDescription :: G.NamedType -> G.Description
typeToDescription = G.Description . G.unName . G.unNamedType

mkCompExpTy :: PGScalarType -> G.NamedType
mkCompExpTy = addTypeSuffix "_comparison_exp" . mkScalarTy
mkCompExpTy :: PGColumnType -> G.NamedType
mkCompExpTy = addTypeSuffix "_comparison_exp" . mkColumnType

mkCastExpTy :: PGScalarType -> G.NamedType
mkCastExpTy = addTypeSuffix "_cast_exp" . mkScalarTy
mkCastExpTy :: PGColumnType -> G.NamedType
mkCastExpTy = addTypeSuffix "_cast_exp" . mkColumnType

-- TODO(shahidhk) this should ideally be st_d_within_geometry
{-
@ -51,48 +47,46 @@ stDWithinGeographyInpTy = G.NamedType "st_d_within_geography_input"

-- | Makes an input type declaration for the @_cast@ field of a comparison expression.
-- (Currently only used for casting between geometry and geography types.)
mkCastExpressionInputType :: PGScalarType -> [PGScalarType] -> InpObjTyInfo
mkCastExpressionInputType :: PGColumnType -> [PGColumnType] -> InpObjTyInfo
mkCastExpressionInputType sourceType targetTypes =
mkHsraInpTyInfo (Just description) (mkCastExpTy sourceType) (fromInpValL targetFields)
where
description = mconcat
[ "Expression to compare the result of casting a column of type "
, typeToDescription $ mkScalarTy sourceType
, typeToDescription $ mkColumnType sourceType
, ". Multiple cast targets are combined with logical 'AND'."
]
targetFields = map targetField targetTypes
targetField targetType = InpValInfo
Nothing
(G.unNamedType $ mkScalarTy targetType)
(G.unNamedType $ mkColumnType targetType)
Nothing
(G.toGT $ mkCompExpTy targetType)

--- | make compare expression input type
mkCompExpInp :: PGScalarType -> InpObjTyInfo
mkCompExpInp :: PGColumnType -> InpObjTyInfo
mkCompExpInp colTy =
InpObjTyInfo (Just tyDesc) (mkCompExpTy colTy) (fromInpValL $ concat
[ map (mk colScalarTy) typedOps
, map (mk $ G.toLT $ G.toNT colScalarTy) listOps
, bool [] (map (mk $ mkScalarTy PGText) stringOps) isStringTy
, bool [] (map jsonbOpToInpVal jsonbOps) isJsonbTy
, bool [] (stDWithinGeoOpInpVal stDWithinGeometryInpTy :
map geoOpToInpVal (geoOps ++ geomOps)) isGeometryType
, bool [] (stDWithinGeoOpInpVal stDWithinGeographyInpTy :
map geoOpToInpVal geoOps) isGeographyType
[ map (mk colGqlType) typedOps
, map (mk $ G.toLT $ G.toNT colGqlType) listOps
, guard (isScalarWhere isStringType) *> map (mk $ mkScalarTy PGText) stringOps
, guard (isScalarWhere (== PGJSONB)) *> map jsonbOpToInpVal jsonbOps
, guard (isScalarWhere (== PGGeometry)) *>
(stDWithinGeoOpInpVal stDWithinGeometryInpTy : map geoOpToInpVal (geoOps ++ geomOps))
, guard (isScalarWhere (== PGGeography)) *>
(stDWithinGeoOpInpVal stDWithinGeographyInpTy : map geoOpToInpVal geoOps)
, [InpValInfo Nothing "_is_null" Nothing $ G.TypeNamed (G.Nullability True) $ G.NamedType "Boolean"]
, maybeToList castOpInputValue
, castOpInputValues
]) TLHasuraType
where
colGqlType = mkColumnType colTy
colTyDesc = typeToDescription colGqlType
tyDesc =
"expression to compare columns of type " <> colTyDesc
<> ". All fields are combined with logical 'AND'."
isStringTy = case colTy of
PGVarchar -> True
PGText -> True
_ -> False
isScalarWhere = flip isScalarColumnWhere colTy
mk t n = InpValInfo Nothing n Nothing $ G.toGT t
colScalarTy = mkScalarTy colTy
-- colScalarListTy = GA.GTList colGTy

typedOps =
["_eq", "_neq", "_gt", "_lt", "_gte", "_lte"]
listOps =
@ -105,10 +99,7 @@ mkCompExpInp colTy =
, "_similar", "_nsimilar"
]

isJsonbTy = case colTy of
PGJSONB -> True
_ -> False
jsonbOpToInpVal (op, ty, desc) = InpValInfo (Just desc) op Nothing ty
jsonbOpToInpVal (opName, ty, desc) = InpValInfo (Just desc) opName Nothing ty
jsonbOps =
[ ( "_contains"
, G.toGT $ mkScalarTy PGJSONB
@ -132,9 +123,9 @@ mkCompExpInp colTy =
)
]

castOpInputValue =
castOpInputValues =
-- currently, only geometry/geography types support casting
guard (isGeoType colTy) $>
guard (isScalarWhere isGeoType) $>
InpValInfo Nothing "_cast" Nothing (G.toGT $ mkCastExpTy colTy)

stDWithinGeoOpInpVal ty =
@ -142,19 +133,8 @@ mkCompExpInp colTy =
stDWithinGeoDesc =
"is the column within a distance from a " <> colTyDesc <> " value"

-- Geometry related ops
isGeometryType = case colTy of
PGGeometry -> True
_ -> False

-- Geography related ops
isGeographyType = case colTy of
PGGeography -> True
_ -> False

geoOpToInpVal (op, desc) =
InpValInfo (Just desc) op Nothing $ G.toGT $ mkScalarTy colTy
colTyDesc = typeToDescription $ mkScalarTy colTy
geoOpToInpVal (opName, desc) =
InpValInfo (Just desc) opName Nothing $ G.toGT colGqlType

-- operators applicable only to geometry types
geomOps :: [(G.Name, G.Description)]
@ -192,12 +172,10 @@ geoInputTypes :: [InpObjTyInfo]
geoInputTypes =
[ stDWithinGeometryInputType
, stDWithinGeographyInputType
, castGeometryInputType
, castGeographyInputType
, mkCastExpressionInputType (PGColumnScalar PGGeometry) [PGColumnScalar PGGeography]
, mkCastExpressionInputType (PGColumnScalar PGGeography) [PGColumnScalar PGGeometry]
]

where

stDWithinGeometryInputType =
mkHsraInpTyInfo Nothing stDWithinGeometryInpTy $ fromInpValL
[ InpValInfo Nothing "from" Nothing $ G.toGT $ G.toNT $ mkScalarTy PGGeometry
@ -211,9 +189,6 @@ geoInputTypes =
Nothing "use_spheroid" (Just $ G.VCBoolean True) $ G.toGT $ mkScalarTy PGBoolean
]

castGeometryInputType = mkCastExpressionInputType PGGeometry [PGGeography]
castGeographyInputType = mkCastExpressionInputType PGGeography [PGGeometry]

mkBoolExpName :: QualifiedTable -> G.Name
mkBoolExpName tn =
qualObjectToName tn <> "_bool_exp"
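
The rewritten mkCompExpInp builds its conditional operator lists with guard in the list Alternative instead of bool. A small standalone illustration (not from the diff) of why the two forms agree; the operator names here are just placeholders:

import Control.Monad (guard)
import Data.Bool (bool)
import Data.Functor (($>))

-- Both build the operator list only when the condition holds.
opsWithBool, opsWithGuard :: Bool -> [String]
opsWithBool  isStringTy = bool [] ["_like", "_ilike"] isStringTy
opsWithGuard isStringTy = guard isStringTy *> ["_like", "_ilike"]

-- Likewise, `guard cond $> x` yields [x] or [], which is why the
-- Maybe-valued castOpInputValue could become the list-valued
-- castOpInputValues without a maybeToList wrapper.
castOps :: Bool -> [String]
castOps isGeo = guard isGeo $> "_cast"

main :: IO ()
main = do
  print (opsWithBool True, opsWithGuard True)   -- (["_like","_ilike"],["_like","_ilike"])
  print (opsWithBool False, opsWithGuard False) -- ([],[])
  print (castOps True)                          -- ["_cast"]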
@ -1,14 +1,17 @@
module Hasura.GraphQL.Schema.Common
( qualObjectToName
, addTypeSuffix
, fromInpValL

, mkColName
, mkColumnType
, mkRelName
, mkAggRelName

, SelField

, mkTableTy
, mkTableEnumType
, mkTableAggTy

, mkColumnEnumVal
@ -22,12 +25,14 @@ import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types

type SelField =
Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool)
type SelField = Either PGColInfo (RelInfo, Bool, AnnBoolExpPartialSQL, Maybe Int, Bool)

qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name
qualObjectToName = G.Name . snakeCaseQualObject

addTypeSuffix :: Text -> G.NamedType -> G.NamedType
addTypeSuffix suffix baseType = G.NamedType $ G.unNamedType baseType <> G.Name suffix

fromInpValL :: [InpValInfo] -> Map.HashMap G.Name InpValInfo
fromInpValL = mapFromL _iviName

@ -40,13 +45,19 @@ mkAggRelName rn = G.Name $ relNameToTxt rn <> "_aggregate"
mkColName :: PGCol -> G.Name
mkColName (PGCol n) = G.Name n

mkColumnType :: PGColumnType -> G.NamedType
mkColumnType = \case
PGColumnScalar scalarType -> mkScalarTy scalarType
PGColumnEnumReference (EnumReference enumTable _) -> mkTableEnumType enumTable
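
mkColumnType is the pivot of the new column typing: a scalar column keeps its scalar GraphQL type, while a column referencing an enum table gets the "<table>_enum" type generated for that table. A self-contained toy model of that mapping (simplified types; "colors" is a hypothetical table name):

{-# LANGUAGE OverloadedStrings #-}
import Data.Text (Text)
import qualified Data.Text.IO as T

data ColumnType
  = Scalar Text         -- e.g. "text", "integer"
  | EnumReference Text  -- name of the referenced enum table

-- Mirrors mkColumnType: scalar columns keep the scalar's type name,
-- while enum-table references use the "<table>_enum" type name that
-- mkTableEnumType builds for the referenced table.
graphQLTypeName :: ColumnType -> Text
graphQLTypeName (Scalar name)         = name
graphQLTypeName (EnumReference table) = table <> "_enum"

main :: IO ()
main = mapM_ (T.putStrLn . graphQLTypeName)
  [Scalar "text", EnumReference "colors"]  -- prints: text, colors_enum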
mkTableTy :: QualifiedTable -> G.NamedType
mkTableTy =
G.NamedType . qualObjectToName
mkTableTy = G.NamedType . qualObjectToName

mkTableEnumType :: QualifiedTable -> G.NamedType
mkTableEnumType = addTypeSuffix "_enum" . mkTableTy

mkTableAggTy :: QualifiedTable -> G.NamedType
mkTableAggTy tn =
G.NamedType $ qualObjectToName tn <> "_aggregate"
mkTableAggTy = addTypeSuffix "_aggregate" . mkTableTy

-- used for 'distinct_on' in select and upsert's 'update columns'
mkColumnEnumVal :: PGCol -> EnumValInfo

@ -17,7 +17,7 @@ import Hasura.SQL.Types
mkPGColInp :: PGColInfo -> InpValInfo
mkPGColInp (PGColInfo colName colTy _) =
InpValInfo Nothing (G.Name $ getPGColTxt colName) Nothing $
G.toGT $ mkScalarTy colTy
G.toGT $ mkColumnType colTy

-- table_mutation_response
mkMutRespTy :: QualifiedTable -> G.NamedType

@ -177,11 +177,11 @@ mkInsMutFld tn isUpsertable =
onConflictArg =
InpValInfo (Just onConflictDesc) "on_conflict" Nothing $ G.toGT $ mkOnConflictInpTy tn

mkConstriantTy :: QualifiedTable -> [ConstraintName] -> EnumTyInfo
mkConstriantTy tn cons = enumTyInfo
mkConstraintTy :: QualifiedTable -> [ConstraintName] -> EnumTyInfo
mkConstraintTy tn cons = enumTyInfo
where
enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkConstraintInpTy tn) $
mapFromL _eviVal $ map mkConstraintEnumVal cons
EnumValuesSynthetic . mapFromL _eviVal $ map mkConstraintEnumVal cons

desc = G.Description $
"unique or primary key constraints on table " <>> tn
@ -194,15 +194,15 @@ mkUpdColumnTy :: QualifiedTable -> [PGCol] -> EnumTyInfo
mkUpdColumnTy tn cols = enumTyInfo
where
enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkUpdColumnInpTy tn) $
mapFromL _eviVal $ map mkColumnEnumVal cols
EnumValuesSynthetic . mapFromL _eviVal $ map mkColumnEnumVal cols

desc = G.Description $
"update columns of table " <>> tn

mkConflictActionTy :: Bool -> EnumTyInfo
mkConflictActionTy updAllowed =
mkHsraEnumTyInfo (Just desc) conflictActionTy $ mapFromL _eviVal $
[enumValIgnore] <> bool [] [enumValUpdate] updAllowed
mkHsraEnumTyInfo (Just desc) conflictActionTy $
EnumValuesSynthetic . mapFromL _eviVal $ [enumValIgnore] <> bool [] [enumValUpdate] updAllowed
where
desc = G.Description "conflict action"
enumValIgnore = EnumValInfo (Just "ignore the insert on this row")
@ -216,7 +216,7 @@ mkOnConflictTypes tn uniqueOrPrimaryCons cols =
bool [] tyInfos
where
tyInfos = [ TIEnum $ mkConflictActionTy isUpdAllowed
, TIEnum $ mkConstriantTy tn uniqueOrPrimaryCons
, TIEnum $ mkConstraintTy tn uniqueOrPrimaryCons
, TIEnum $ mkUpdColumnTy tn cols
, TIInpObj $ mkOnConflictInp tn
]

@ -21,8 +21,8 @@ ordByTy = G.NamedType "order_by"

ordByEnumTy :: EnumTyInfo
ordByEnumTy =
mkHsraEnumTyInfo (Just desc) ordByTy $ mapFromL _eviVal $
map mkEnumVal enumVals
mkHsraEnumTyInfo (Just desc) ordByTy $
EnumValuesSynthetic . mapFromL _eviVal $ map mkEnumVal enumVals
where
desc = G.Description "column ordering options"
mkEnumVal (n, d) =

@ -28,7 +28,7 @@ mkSelColumnTy :: QualifiedTable -> [PGCol] -> EnumTyInfo
mkSelColumnTy tn cols = enumTyInfo
where
enumTyInfo = mkHsraEnumTyInfo (Just desc) (mkSelColumnInpTy tn) $
mapFromL _eviVal $ map mkColumnEnumVal cols
EnumValuesSynthetic . mapFromL _eviVal $ map mkColumnEnumVal cols

desc = G.Description $
"select columns of table " <>> tn
@ -39,8 +39,7 @@ mkSelColumnInpTy tn =
G.NamedType $ qualObjectToName tn <> "_select_column"

mkTableAggFldsTy :: QualifiedTable -> G.NamedType
mkTableAggFldsTy tn =
G.NamedType $ qualObjectToName tn <> "_aggregate_fields"
mkTableAggFldsTy = addTypeSuffix "_aggregate_fields" . mkTableTy

mkTableColAggFldsTy :: G.Name -> QualifiedTable -> G.NamedType
mkTableColAggFldsTy op tn =
@ -50,17 +49,13 @@ mkTableByPkName :: QualifiedTable -> G.Name
mkTableByPkName tn = qualObjectToName tn <> "_by_pk"

-- Support argument params for PG columns
mkPGColParams :: PGScalarType -> ParamMap
mkPGColParams = \case
PGJSONB -> jsonParams
PGJSON -> jsonParams
_ -> Map.empty
where
pathDesc = "JSON select path"
jsonParams = Map.fromList
[ (G.Name "path", InpValInfo (Just pathDesc) "path" Nothing $
G.toGT $ mkScalarTy PGText)
]
mkPGColParams :: PGColumnType -> ParamMap
mkPGColParams colType
| isScalarColumnWhere isJSONType colType =
let pathDesc = "JSON select path"
in Map.fromList
[ (G.Name "path", InpValInfo (Just pathDesc) "path" Nothing $ G.toGT $ mkScalarTy PGText) ]
| otherwise = Map.empty

mkPGColFld :: PGColInfo -> ObjFldInfo
mkPGColFld (PGColInfo colName colTy isNullable) =
@ -68,9 +63,9 @@ mkPGColFld (PGColInfo colName colTy isNullable) =
where
n = G.Name $ getPGColTxt colName
ty = bool notNullTy nullTy isNullable
scalarTy = mkScalarTy colTy
notNullTy = G.toGT $ G.toNT scalarTy
nullTy = G.toGT scalarTy
columnType = mkColumnType colTy
notNullTy = G.toGT $ G.toNT columnType
nullTy = G.toGT columnType

-- where: table_bool_exp
-- limit: Int
@ -222,7 +217,7 @@ type table_<agg-op>_fields{
mkTableColAggFldsObj
:: QualifiedTable
-> G.Name
-> (PGScalarType -> G.NamedType)
-> (PGColumnType -> G.NamedType)
-> [PGColInfo]
-> ObjTyInfo
mkTableColAggFldsObj tn op f cols =
@ -274,7 +269,7 @@ mkSelFldPKey tn cols =
args = fromInpValL $ map colInpVal cols
ty = G.toGT $ mkTableTy tn
colInpVal (PGColInfo n typ _) =
InpValInfo Nothing (mkColName n) Nothing $ G.toGT $ G.toNT $ mkScalarTy typ
InpValInfo Nothing (mkColName n) Nothing $ G.toGT $ G.toNT $ mkColumnType typ

{-

@ -33,9 +33,8 @@ import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Types
import Hasura.RQL.Types.QueryCollection

import Hasura.SQL.Types (PGScalarType)
import Hasura.SQL.Value (PGColValue,
parsePGValue)
import Hasura.SQL.Types (PGScalarTyped)
import Hasura.SQL.Value (PGColValue)

data QueryParts
= QueryParts
@ -118,8 +117,8 @@ getAnnVarVals varDefsL inpVals = withPathK "variableValues" $ do
showVars :: (Functor f, Foldable f) => f G.Variable -> Text
showVars = showNames . fmap G.unVariable

type VarPGTypes = Map.HashMap G.Variable PGScalarType
type AnnPGVarVals = Map.HashMap G.Variable (PGScalarType, PGColValue)
type VarPGTypes = Map.HashMap G.Variable PGColumnType
type AnnPGVarVals = Map.HashMap G.Variable (PGScalarTyped PGColValue)

-- this is in similar spirit to getAnnVarVals, however
-- here it is much simpler and can get rid of typemap requirement
@ -142,7 +141,7 @@ getAnnPGVarVals varTypes varValsM =
-- TODO: we don't have the graphql type
-- " of type: " <> T.pack (show varType) <>
" in variableValues"
(varType,) <$> runAesonParser (parsePGValue varType) varVal
parsePGScalarValue varType varVal
where
varVals = fromMaybe Map.empty varValsM

@ -19,6 +19,8 @@ import qualified Data.Text as T
import qualified Data.Vector as V
import qualified Language.GraphQL.Draft.Syntax as G

import qualified Hasura.RQL.Types as RQL

import Hasura.GraphQL.Utils
import Hasura.GraphQL.Validate.Context
import Hasura.GraphQL.Validate.Types
@ -249,21 +251,27 @@ validateNamedTypeVal inpValParser (nullability, nt) val = do
fmap (AGObject nt) . mapM (validateObject inpValParser ioti)
TIEnum eti ->
withParsed gType (getEnum inpValParser) val $
fmap (AGEnum nt) . mapM (validateEnum eti)
fmap (AGEnum nt) . validateEnum eti
TIScalar (ScalarTyInfo _ pgColTy _) ->
withParsed gType (getScalar inpValParser) val $
fmap (AGScalar pgColTy) . mapM (validateScalar pgColTy)
where
throwUnexpTypeErr ty = throw500 $ "unexpected " <> ty <> " type info for: "
<> showNamedTy nt
validateEnum enumTyInfo enumVal =
if Map.member enumVal (_etiValues enumTyInfo)
then return enumVal
else throwVE $ "unexpected value " <>
showName (G.unEnumValue enumVal) <>
" for enum: " <> showNamedTy nt
validateScalar pgColTy =
runAesonParser (parsePGValue pgColTy)

validateEnum enumTyInfo maybeEnumValue = case (_etiValues enumTyInfo, maybeEnumValue) of
(EnumValuesSynthetic _, Nothing) -> pure $ AGESynthetic Nothing
(EnumValuesReference reference, Nothing) -> pure $ AGEReference reference Nothing
(EnumValuesSynthetic values, Just enumValue)
| Map.member enumValue values -> pure $ AGESynthetic (Just enumValue)
(EnumValuesReference reference@(EnumReference _ values), Just enumValue)
| rqlEnumValue <- RQL.EnumValue . G.unName $ G.unEnumValue enumValue
, Map.member rqlEnumValue values
-> pure $ AGEReference reference (Just rqlEnumValue)
(_, Just enumValue) -> throwVE $
"unexpected value " <> showName (G.unEnumValue enumValue) <> " for enum: " <> showNamedTy nt

validateScalar pgColTy = runAesonParser (parsePGValue pgColTy)
gType = G.TypeNamed nullability nt

validateList

@ -21,6 +21,8 @@ module Hasura.GraphQL.Validate.Types
, EnumTyInfo(..)
, mkHsraEnumTyInfo

, EnumValuesInfo(..)
, normalizeEnumValues
, EnumValInfo(..)
, InpObjFldMap
, InpObjTyInfo(..)
@ -52,6 +54,7 @@ module Hasura.GraphQL.Validate.Types
, TypeLoc (..)
, typeEq
, AnnGValue(..)
, AnnGEnumValue(..)
, AnnGObject
, hasNullVal
, getAnnInpValKind
@ -60,7 +63,6 @@ module Hasura.GraphQL.Validate.Types
) where

import Hasura.Prelude
import Instances.TH.Lift ()

import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
@ -73,14 +75,15 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified Language.GraphQL.Draft.TH as G
import qualified Language.Haskell.TH.Syntax as TH

import qualified Hasura.RQL.Types.Column as RQL

import Hasura.GraphQL.Utils
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.RemoteSchema
import Hasura.SQL.Types
import Hasura.SQL.Value

-- | Typeclass for equating relevant properties of various GraphQL types
-- | defined below
-- | Typeclass for equating relevant properties of various GraphQL types defined below
class EquatableGType a where
type EqProps a
getEqProps :: a -> EqProps a
@ -99,21 +102,39 @@ fromEnumValDef :: G.EnumValueDefinition -> EnumValInfo
fromEnumValDef (G.EnumValueDefinition descM val _) =
EnumValInfo descM val False

data EnumValuesInfo
= EnumValuesSynthetic !(Map.HashMap G.EnumValue EnumValInfo)
-- ^ Values for an enum that exists only in the GraphQL schema and does not have any external
-- source of truth.
| EnumValuesReference !RQL.EnumReference
-- ^ Values for an enum that is backed by an enum table reference (see "Hasura.RQL.Schema.Enum").
deriving (Show, Eq, TH.Lift)

normalizeEnumValues :: EnumValuesInfo -> Map.HashMap G.EnumValue EnumValInfo
normalizeEnumValues = \case
EnumValuesSynthetic values -> values
EnumValuesReference (RQL.EnumReference _ values) ->
mapFromL _eviVal . flip map (Map.toList values) $
\(RQL.EnumValue name, RQL.EnumValueInfo maybeDescription) -> EnumValInfo
{ _eviVal = G.EnumValue $ G.Name name
, _eviDesc = G.Description <$> maybeDescription
, _eviIsDeprecated = False }
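
normalizeEnumValues above lets consumers that predate table-backed enums (type equality, introspection) keep working against a single map of values. A standalone sketch of the same normalization, with toy types standing in for the RQL and GraphQL ones:

{-# LANGUAGE OverloadedStrings #-}
import qualified Data.HashMap.Strict as M
import Data.Text (Text)

-- A toy stand-in for EnumValInfo: a GraphQL enum value plus description.
data ValueInfo = ValueInfo { viName :: Text, viDescription :: Maybe Text }
  deriving Show

data EnumValues
  = Synthetic (M.HashMap Text ValueInfo)    -- values that live only in the GraphQL schema
  | Reference (M.HashMap Text (Maybe Text)) -- raw rows loaded from an enum table

-- Mirrors normalizeEnumValues: synthetic values pass through, while table
-- rows are converted into schema-shaped value infos on demand, so older
-- consumers see one shape regardless of where the values came from.
normalize :: EnumValues -> M.HashMap Text ValueInfo
normalize (Synthetic values) = values
normalize (Reference rows)   = M.mapWithKey ValueInfo rows

main :: IO ()
main = print . normalize $ Reference (M.fromList [("red", Just "a warm color")])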
data EnumTyInfo
= EnumTyInfo
{ _etiDesc :: !(Maybe G.Description)
, _etiName :: !G.NamedType
, _etiValues :: !(Map.HashMap G.EnumValue EnumValInfo)
, _etiValues :: !EnumValuesInfo
, _etiLoc :: !TypeLoc
} deriving (Show, Eq, TH.Lift)

instance EquatableGType EnumTyInfo where
type EqProps EnumTyInfo = (G.NamedType, Map.HashMap G.EnumValue EnumValInfo)
getEqProps ety = (,) (_etiName ety) (_etiValues ety)
getEqProps ety = (,) (_etiName ety) (normalizeEnumValues $ _etiValues ety)

fromEnumTyDef :: G.EnumTypeDefinition -> TypeLoc -> EnumTyInfo
fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) loc =
EnumTyInfo descM (G.NamedType n) enumVals loc
EnumTyInfo descM (G.NamedType n) (EnumValuesSynthetic enumVals) loc
where
enumVals = Map.fromList
[(G._evdName valDef, fromEnumValDef valDef) | valDef <- valDefs]
@ -121,7 +142,7 @@ fromEnumTyDef (G.EnumTypeDefinition descM n _ valDefs) loc =
mkHsraEnumTyInfo
:: Maybe G.Description
-> G.NamedType
-> Map.HashMap G.EnumValue EnumValInfo
-> EnumValuesInfo
-> EnumTyInfo
mkHsraEnumTyInfo descM ty enumVals =
EnumTyInfo descM ty enumVals TLHasuraType
@ -659,9 +680,15 @@ data AnnInpVal

type AnnGObject = OMap.InsOrdHashMap G.Name AnnInpVal

-- | See 'EnumValuesInfo' for information about what these cases mean.
data AnnGEnumValue
= AGESynthetic !(Maybe G.EnumValue)
| AGEReference !RQL.EnumReference !(Maybe RQL.EnumValue)
deriving (Show, Eq)

data AnnGValue
= AGScalar !PGScalarType !(Maybe PGColValue)
| AGEnum !G.NamedType !(Maybe G.EnumValue)
| AGEnum !G.NamedType !AnnGEnumValue
| AGObject !G.NamedType !(Maybe AnnGObject)
| AGArray !G.ListType !(Maybe [AnnInpVal])
deriving (Show, Eq)
@ -678,11 +705,12 @@ instance J.ToJSON AnnGValue where

hasNullVal :: AnnGValue -> Bool
hasNullVal = \case
AGScalar _ Nothing -> True
AGEnum _ Nothing -> True
AGObject _ Nothing -> True
AGArray _ Nothing -> True
_ -> False
AGScalar _ Nothing -> True
AGEnum _ (AGESynthetic Nothing) -> True
AGEnum _ (AGEReference _ Nothing) -> True
AGObject _ Nothing -> True
AGArray _ Nothing -> True
_ -> False

getAnnInpValKind :: AnnGValue -> Text
getAnnInpValKind = \case

@ -3,11 +3,12 @@ module Hasura.Prelude
, onNothing
, onJust
, onLeft
, choice
, bsToTxt
, txtToBs
) where

import Control.Applicative as M ((<|>))
import Control.Applicative as M (Alternative (..))
import Control.Monad as M (void, when)
import Control.Monad.Base as M
import Control.Monad.Except as M
@ -19,7 +20,9 @@ import Data.Bool as M (bool)
import Data.Data as M (Data (..))
import Data.Either as M (lefts, partitionEithers,
rights)
import Data.Foldable as M (foldrM, toList)
import Data.Foldable as M (foldrM, for_, toList,
traverse_)
import Data.Function as M (on, (&))
import Data.Functor as M (($>), (<&>))
import Data.Hashable as M (Hashable)
import Data.List as M (find, foldl', group,
@ -33,6 +36,7 @@ import Data.Ord as M (comparing)
import Data.Semigroup as M (Semigroup (..))
import Data.String as M (IsString)
import Data.Text as M (Text)
import Data.Traversable as M (for)
import Data.Word as M (Word64)
import GHC.Generics as M (Generic)
import Prelude as M hiding (fail, init, lookup)
@ -51,6 +55,9 @@ onJust m action = maybe (return ()) action m
onLeft :: (Monad m) => Either e a -> (e -> m a) -> m a
onLeft e f = either f return e

choice :: (Alternative f) => [f a] -> f a
choice = foldr (<|>) empty
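
choice is simply (<|>) folded over a list, so its behaviour follows the Alternative instance at hand; a quick standalone illustration (not from the diff):

import Control.Applicative (Alternative (..))

choice :: Alternative f => [f a] -> f a
choice = foldr (<|>) empty

main :: IO ()
main = do
  print (choice [Nothing, Just 1, Just 2])  -- Just 1: Maybe keeps the first success
  print (choice [[1], [], [2, 3]] :: [Int]) -- [1,2,3]: for lists, (<|>) concatenates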
bsToTxt :: B.ByteString -> Text
bsToTxt = TE.decodeUtf8With TE.lenientDecode

@ -228,7 +228,7 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re
-- can only replace for same table
when replace $ do
ti' <- askTabInfoFromTrigger name
when (tiName ti' /= tiName ti) $ throw400 NotSupported "cannot replace table or schema for trigger"
when (_tiName ti' /= _tiName ti) $ throw400 NotSupported "cannot replace table or schema for trigger"

assertCols ti insert
assertCols ti update
@ -242,7 +242,7 @@ subTableP1 (CreateEventTriggerQuery name qt insert update delete enableManual re
let cols = sosColumns sos
case cols of
SubCStar -> return ()
SubCArray pgcols -> forM_ pgcols (assertPGCol (tiFieldInfoMap ti) "")
SubCArray pgcols -> forM_ pgcols (assertPGCol (_tiFieldInfoMap ti) "")

--(QErrM m, CacheRWM m, MonadTx m, MonadIO m)

@ -285,7 +285,7 @@ subTableP2
:: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasSQLGenCtx m)
=> QualifiedTable -> Bool -> EventTriggerConf -> m ()
subTableP2 qt replace etc = do
allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt
strfyNum <- stringifyNum <$> askSQLGenCtx
if replace
then do
@ -309,7 +309,7 @@ unsubTableP1
unsubTableP1 (DeleteEventTriggerQuery name) = do
adminOnly
ti <- askTabInfoFromTrigger name
return $ tiName ti
return $ _tiName ti

unsubTableP2
:: (QErrM m, CacheRWM m, MonadTx m)
@ -363,7 +363,7 @@ runInvokeEventTrigger (InvokeEventTriggerQuery name payload) = do
trigInfo <- askEventTriggerInfo name
assertManual $ etiOpsDef trigInfo
ti <- askTabInfoFromTrigger name
eid <-liftTx $ insertManualEvent (tiName ti) name payload
eid <-liftTx $ insertManualEvent (_tiName ti) name payload
return $ encJFromJValue $ object ["event_id" .= eid]
where
assertManual (TriggerOpsDef _ _ _ man) = case man of

@ -55,6 +55,7 @@ import qualified Hasura.RQL.Types.RemoteSchema as TRS
data TableMeta
= TableMeta
{ _tmTable :: !QualifiedTable
, _tmIsEnum :: !Bool
, _tmObjectRelationships :: ![DR.ObjRelDef]
, _tmArrayRelationships :: ![DR.ArrRelDef]
, _tmInsertPermissions :: ![DP.InsPermDef]
@ -64,9 +65,9 @@ data TableMeta
, _tmEventTriggers :: ![DTS.EventTriggerConf]
} deriving (Show, Eq, Lift)

mkTableMeta :: QualifiedTable -> TableMeta
mkTableMeta qt =
TableMeta qt [] [] [] [] [] [] []
mkTableMeta :: QualifiedTable -> Bool -> TableMeta
mkTableMeta qt isEnum =
TableMeta qt isEnum [] [] [] [] [] [] []

makeLenses ''TableMeta

@ -78,6 +79,7 @@ instance FromJSON TableMeta where

TableMeta
<$> o .: tableKey
<*> o .:? isEnumKey .!= False
<*> o .:? orKey .!= []
<*> o .:? arKey .!= []
<*> o .:? ipKey .!= []
@ -88,6 +90,7 @@ instance FromJSON TableMeta where

where
tableKey = "table"
isEnumKey = "is_enum"
orKey = "object_relationships"
arKey = "array_relationships"
ipKey = "insert_permissions"
@ -100,8 +103,8 @@ instance FromJSON TableMeta where
HS.fromList (M.keys o) `HS.difference` expectedKeySet

expectedKeySet =
HS.fromList [ tableKey, orKey, arKey, ipKey
, spKey, upKey, dpKey, etKey
HS.fromList [ tableKey, isEnumKey, orKey, arKey
, ipKey, spKey, upKey, dpKey, etKey
]

parseJSON _ =
@ -225,8 +228,11 @@ applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) =
withPathK "tables" $ do

-- tables and views
indexedForM_ (map _tmTable tables) $ \tableName ->
void $ DT.trackExistingTableOrViewP2 tableName False
indexedForM_ tables $ \tableMeta -> do
let trackQuery = DT.TrackTable
{ DT.tName = tableMeta ^. tmTable
, DT.tIsEnum = tableMeta ^. tmIsEnum }
void $ DT.trackExistingTableOrViewP2 trackQuery

-- Relationships
indexedForM_ tables $ \table -> do
@ -288,7 +294,7 @@ applyQP2 (ReplaceMetadata tables mFunctions mSchemas mCollections mAllowlist) =
processPerms tabInfo perms =
indexedForM_ perms $ \permDef -> do
permInfo <- DP.addPermP1 tabInfo permDef
DP.addPermP2 (tiName tabInfo) permDef permInfo
DP.addPermP2 (_tiName tabInfo) permDef permInfo

runReplaceMetadata
:: ( QErrM m, UserInfoM m, CacheRWM m, MonadTx m
@ -311,8 +317,9 @@ $(deriveToJSON defaultOptions ''ExportMetadata)
fetchMetadata :: Q.TxE QErr ReplaceMetadata
fetchMetadata = do
tables <- Q.catchE defaultTxErrorHandler fetchTables
let qts = map (uncurry QualifiedObject) tables
tableMetaMap = M.fromList $ zip qts $ map mkTableMeta qts
let tableMetaMap = M.fromList . flip map tables $ \(schema, name, isEnum) ->
let qualifiedName = QualifiedObject schema name
in (qualifiedName, mkTableMeta qualifiedName isEnum)

-- Fetch all the relationships
relationships <- Q.catchE defaultTxErrorHandler fetchRelationships
@ -384,7 +391,7 @@ fetchMetadata = do

fetchTables =
Q.listQ [Q.sql|
SELECT table_schema, table_name from hdb_catalog.hdb_table
SELECT table_schema, table_name, is_enum from hdb_catalog.hdb_table
WHERE is_system_defined = 'false'
|] () False

@ -108,20 +108,20 @@ dropView vn =

procSetObj
:: (QErrM m)
=> TableInfo -> Maybe ColVals
=> TableInfo PGColInfo -> Maybe ColVals
-> m (PreSetColsPartial, [Text], [SchemaDependency])
procSetObj ti mObj = do
(setColTups, deps) <- withPathK "set" $
fmap unzip $ forM (HM.toList setObj) $ \(pgCol, val) -> do
ty <- askPGType fieldInfoMap pgCol $
"column " <> pgCol <<> " not found in table " <>> tn
sqlExp <- valueParser (PgTypeSimple ty) val
sqlExp <- valueParser (PGTypeSimple ty) val
let dep = mkColDep (getDepReason sqlExp) tn pgCol
return ((pgCol, sqlExp), dep)
return (HM.fromList setColTups, depHeaders, deps)
where
fieldInfoMap = tiFieldInfoMap ti
tn = tiName ti
fieldInfoMap = _tiFieldInfoMap ti
tn = _tiName ti
setObj = fromMaybe mempty mObj
depHeaders = getDepHeadersFromVal $ Object $
HM.fromList $ map (first getPGColTxt) $ HM.toList setObj
@ -130,7 +130,7 @@ procSetObj ti mObj = do

buildInsPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColInfo
-> PermDef InsPerm
-> m (WithDeps InsPermInfo)
buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) =
@ -148,8 +148,8 @@ buildInsPermInfo tabInfo (PermDef rn (InsPerm chk set mCols) _) =
insColsWithoutPresets = insCols \\ HM.keys setColsSQL
return (InsPermInfo (HS.fromList insColsWithoutPresets) vn be setColsSQL reqHdrs, deps)
where
fieldInfoMap = tiFieldInfoMap tabInfo
tn = tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo
tn = _tiName tabInfo
vn = buildViewName tn rn PTInsert
allCols = map pgiName $ getCols fieldInfoMap
insCols = fromMaybe allCols $ convColSpec fieldInfoMap <$> mCols
@ -213,7 +213,7 @@ $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm)

buildSelPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColInfo
-> SelPerm
-> m (WithDeps SelPermInfo)
buildSelPermInfo tabInfo sp = do
@ -235,8 +235,8 @@ buildSelPermInfo tabInfo sp = do
return (SelPermInfo (HS.fromList pgCols) tn be mLimit allowAgg depHeaders, deps)

where
tn = tiName tabInfo
fieldInfoMap = tiFieldInfoMap tabInfo
tn = _tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo
allowAgg = or $ spAllowAggregations sp
autoInferredErr = "permissions for relationships are automatically inferred"

@ -283,7 +283,7 @@ type CreateUpdPerm = CreatePerm UpdPerm

buildUpdPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColInfo
-> UpdPerm
-> m (WithDeps UpdPermInfo)
buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do
@ -305,8 +305,8 @@ buildUpdPermInfo tabInfo (UpdPerm colSpec set fltr) = do
return (UpdPermInfo (HS.fromList updColsWithoutPreSets) tn be setColsSQL reqHeaders, deps)

where
tn = tiName tabInfo
fieldInfoMap = tiFieldInfoMap tabInfo
tn = _tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo
updCols = convColSpec fieldInfoMap colSpec
relInUpdErr = "relationships can't be used in update"

@ -347,7 +347,7 @@ type CreateDelPerm = CreatePerm DelPerm

buildDelPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColInfo
-> DelPerm
-> m (WithDeps DelPermInfo)
buildDelPermInfo tabInfo (DelPerm fltr) = do
@ -357,8 +357,8 @@ buildDelPermInfo tabInfo (DelPerm fltr) = do
depHeaders = getDependentHeaders fltr
return (DelPermInfo tn be depHeaders, deps)
where
tn = tiName tabInfo
fieldInfoMap = tiFieldInfoMap tabInfo
tn = _tiName tabInfo
fieldInfoMap = _tiFieldInfoMap tabInfo

type DropDelPerm = DropPerm DelPerm

@ -23,6 +23,7 @@ import Hasura.RQL.GBoolExp
import Hasura.RQL.Types
import Hasura.Server.Utils
import Hasura.SQL.Types
import Hasura.SQL.Value

import qualified Database.PG.Query as Q

@ -39,7 +40,7 @@ instance ToJSON PermColSpec where
toJSON (PCCols cols) = toJSON cols
toJSON PCStar = "*"

convColSpec :: FieldInfoMap -> PermColSpec -> [PGCol]
convColSpec :: FieldInfoMap PGColInfo -> PermColSpec -> [PGCol]
convColSpec _ (PCCols cols) = cols
convColSpec cim PCStar = map pgiName $ getCols cim

@ -47,18 +48,18 @@ assertPermNotDefined
:: (MonadError QErr m)
=> RoleName
-> PermAccessor a
-> TableInfo
-> TableInfo PGColInfo
-> m ()
assertPermNotDefined roleName pa tableInfo =
when (permissionIsDefined rpi pa || roleName == adminRole)
$ throw400 AlreadyExists $ mconcat
[ "'" <> T.pack (show $ permAccToType pa) <> "'"
, " permission on " <>> tiName tableInfo
, " permission on " <>> _tiName tableInfo
, " for role " <>> roleName
, " already exists"
]
where
rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo
rpi = M.lookup roleName $ _tiRolePermInfoMap tableInfo

permissionIsDefined
:: Maybe RolePermInfo -> PermAccessor a -> Bool
@ -69,21 +70,21 @@ assertPermDefined
:: (MonadError QErr m)
=> RoleName
-> PermAccessor a
-> TableInfo
-> TableInfo PGColInfo
-> m ()
assertPermDefined roleName pa tableInfo =
unless (permissionIsDefined rpi pa) $ throw400 PermissionDenied $ mconcat
[ "'" <> T.pack (show $ permAccToType pa) <> "'"
, " permission on " <>> tiName tableInfo
, " permission on " <>> _tiName tableInfo
, " for role " <>> roleName
, " does not exist"
]
where
rpi = M.lookup roleName $ tiRolePermInfoMap tableInfo
rpi = M.lookup roleName $ _tiRolePermInfoMap tableInfo

askPermInfo
:: (MonadError QErr m)
=> TableInfo
=> TableInfo PGColInfo
-> RoleName
-> PermAccessor c
-> m c
@ -91,14 +92,14 @@ askPermInfo tabInfo roleName pa =
case M.lookup roleName rpim >>= (^. paL) of
Just c -> return c
Nothing -> throw400 PermissionDenied $ mconcat
[ pt <> " permisison on " <>> tiName tabInfo
[ pt <> " permisison on " <>> _tiName tabInfo
, " for role " <>> roleName
, " does not exist"
]
where
paL = permAccToLens pa
pt = permTypeToCode $ permAccToType pa
rpim = tiRolePermInfoMap tabInfo
rpim = _tiRolePermInfoMap tabInfo

savePermToCatalog
:: (ToJSON a)
@ -174,7 +175,7 @@ data CreatePermP1Res a

procBoolExp
:: (QErrM m, CacheRM m)
=> QualifiedTable -> FieldInfoMap -> BoolExp
=> QualifiedTable -> FieldInfoMap PGColInfo -> BoolExp
-> m (AnnBoolExpPartialSQL, [SchemaDependency])
procBoolExp tn fieldInfoMap be = do
abe <- annBoolExp valueParser fieldInfoMap be
@ -204,22 +205,21 @@ getDependentHeaders (BoolExp boolExp) =

valueParser
:: (MonadError QErr m)
=> PgType -> Value -> m PartialSQLExp
=> PGType PGColumnType -> Value -> m PartialSQLExp
valueParser pgType = \case
-- When it is a special variable
String t
| isUserVar t -> return $ PSESessVar pgType t
| isReqUserId t -> return $ PSESessVar pgType userIdHeader
| otherwise -> return $ PSESQLExp $
S.SETyAnn (S.SELit t) $ S.mkTypeAnn pgType

| isUserVar t -> return $ mkScalarSessionVar pgType t
| isReqUserId t -> return $ mkScalarSessionVar pgType userIdHeader
-- Typical value as Aeson's value
val -> case pgType of
PgTypeSimple columnType -> PSESQLExp <$> txtRHSBuilder columnType val
PgTypeArray ofType -> do
PGTypeSimple columnType -> PSESQLExp . toTxtValue <$> parsePGScalarValue columnType val
PGTypeArray ofType -> do
vals <- runAesonParser parseJSON val
arrayExp <- S.SEArray <$> indexedForM vals (txtRHSBuilder ofType)
return $ PSESQLExp $ S.SETyAnn arrayExp $ S.mkTypeAnn pgType
PGScalarTyped scalarType scalarValues <- parsePGScalarValues ofType vals
return . PSESQLExp $ S.SETyAnn
(S.SEArray $ map (toTxtValue . PGScalarTyped scalarType) scalarValues)
(S.mkTypeAnn $ PGTypeArray scalarType)

injectDefaults :: QualifiedTable -> QualifiedTable -> Q.Query
injectDefaults qv qt =
@ -258,7 +258,7 @@ class (ToJSON a) => IsPerm a where

buildPermInfo
:: (QErrM m, CacheRM m)
=> TableInfo
=> TableInfo PGColInfo
-> PermDef a
-> m (WithDeps (PermInfo a))

@ -282,7 +282,7 @@ class (ToJSON a) => IsPerm a where
getPermAcc2 _ = permAccessor

validateViewPerm
:: (IsPerm a, QErrM m) => PermDef a -> TableInfo -> m ()
:: (IsPerm a, QErrM m) => PermDef a -> TableInfo PGColInfo -> m ()
validateViewPerm permDef tableInfo =
case permAcc of
PASelect -> return ()
@ -290,13 +290,13 @@ validateViewPerm permDef tableInfo =
PAUpdate -> mutableView tn viIsUpdatable viewInfo "updatable"
PADelete -> mutableView tn viIsDeletable viewInfo "deletable"
where
tn = tiName tableInfo
viewInfo = tiViewInfo tableInfo
tn = _tiName tableInfo
viewInfo = _tiViewInfo tableInfo
permAcc = getPermAcc1 permDef

addPermP1
:: (QErrM m, CacheRM m, IsPerm a)
=> TableInfo -> PermDef a -> m (WithDeps (PermInfo a))
=> TableInfo PGColInfo -> PermDef a -> m (WithDeps (PermInfo a))
addPermP1 tabInfo pd = do
assertPermNotDefined (pdRole pd) (getPermAcc1 pd) tabInfo
buildPermInfo tabInfo pd

@ -35,14 +35,14 @@ import Instances.TH.Lift ()

validateManualConfig
:: (QErrM m, CacheRM m)
=> FieldInfoMap
=> FieldInfoMap PGColInfo
-> RelManualConfig
-> m ()
validateManualConfig fim rm = do
let colMapping = M.toList $ rmColumns rm
remoteQt = rmTable rm
remoteTabInfo <- askTabInfo remoteQt
let remoteFim = tiFieldInfoMap remoteTabInfo
let remoteFim = _tiFieldInfoMap remoteTabInfo
forM_ colMapping $ \(lCol, rCol) -> do
assertPGCol fim "" lCol
assertPGCol remoteFim "" rCol
@ -70,14 +70,14 @@ persistRel (QualifiedObject sn tn) rn relType relDef comment =

checkForFldConfilct
:: (MonadError QErr m)
=> TableInfo
=> TableInfo PGColInfo
-> FieldName
-> m ()
checkForFldConfilct tabInfo f =
case HM.lookup f (tiFieldInfoMap tabInfo) of
case HM.lookup f (_tiFieldInfoMap tabInfo) of
Just _ -> throw400 AlreadyExists $ mconcat
[ "column/relationship " <>> f
, " of table " <>> tiName tabInfo
, " of table " <>> _tiName tabInfo
, " already exists"
]
Nothing -> return ()
@ -90,7 +90,7 @@ validateObjRel
validateObjRel qt (RelDef rn ru _) = do
tabInfo <- askTabInfo qt
checkForFldConfilct tabInfo (fromRel rn)
let fim = tiFieldInfoMap tabInfo
let fim = _tiFieldInfoMap tabInfo
case ru of
RUFKeyOn cn -> assertPGCol fim "" cn
RUManual (ObjRelManualConfig rm) -> validateManualConfig fim rm
@ -168,11 +168,11 @@ validateArrRel
validateArrRel qt (RelDef rn ru _) = do
tabInfo <- askTabInfo qt
checkForFldConfilct tabInfo (fromRel rn)
let fim = tiFieldInfoMap tabInfo
let fim = _tiFieldInfoMap tabInfo
case ru of
RUFKeyOn (ArrRelUsingFKeyOn remoteQt rcn) -> do
remoteTabInfo <- askTabInfo remoteQt
let rfim = tiFieldInfoMap remoteTabInfo
let rfim = _tiFieldInfoMap remoteTabInfo
-- Check if 'using' column exists
assertPGCol rfim "" rcn
RUManual (ArrRelManualConfig rm) ->
@ -229,7 +229,7 @@ dropRelP1 :: (UserInfoM m, QErrM m, CacheRM m) => DropRel -> m [SchemaObjId]
dropRelP1 (DropRel qt rn cascade) = do
adminOnly
tabInfo <- askTabInfo qt
_ <- askRelType (tiFieldInfoMap tabInfo) rn ""
_ <- askRelType (_tiFieldInfoMap tabInfo) rn ""
sc <- askSchemaCache
let depObjs = getDependentObjs sc relObjId
when (depObjs /= [] && not (or cascade)) $ reportDeps depObjs
@ -279,7 +279,7 @@ validateRelP1
validateRelP1 qt rn = do
adminOnly
tabInfo <- askTabInfo qt
askRelType (tiFieldInfoMap tabInfo) rn ""
askRelType (_tiFieldInfoMap tabInfo) rn ""

setRelCommentP2
:: (QErrM m, MonadTx m)

@ -27,7 +27,7 @@ renameRelP2 qt newRN relInfo = do
oldSC <- askSchemaCache
tabInfo <- askTabInfo qt
-- check for conflicts in fieldInfoMap
case HM.lookup (fromRel newRN) $ tiFieldInfoMap tabInfo of
case HM.lookup (fromRel newRN) $ _tiFieldInfoMap tabInfo of
Nothing -> return ()
Just _ ->
throw400 AlreadyExists $ "cannot rename relationship " <> oldRN

@ -42,6 +42,7 @@ data PGColMeta
, pcmOrdinalPosition :: !Int
, pcmDataType :: !PGScalarType
, pcmIsNullable :: !Bool
, pcmReferences :: ![QualifiedTable]
} deriving (Show, Eq)

$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''PGColMeta)
@ -87,8 +88,8 @@ data TableDiff
= TableDiff
{ _tdNewName :: !(Maybe QualifiedTable)
, _tdDroppedCols :: ![PGCol]
, _tdAddedCols :: ![PGColInfo]
, _tdAlteredCols :: ![(PGColInfo, PGColInfo)]
, _tdAddedCols :: ![PGRawColInfo]
, _tdAlteredCols :: ![(PGRawColInfo, PGRawColInfo)]
, _tdDroppedFKeyCons :: ![ConstraintName]
-- The final list of uniq/primary constraint names
-- used for generating types on_conflict clauses
@ -116,8 +117,8 @@ getTableDiff oldtm newtm =

existingCols = getOverlap pcmOrdinalPosition oldCols newCols

pcmToPci (PGColMeta colName _ colType isNullable)
= PGColInfo colName colType isNullable
pcmToPci (PGColMeta colName _ colType isNullable references)
= PGRawColInfo colName colType isNullable references

alteredCols =
flip map (filter (uncurry (/=)) existingCols) $ pcmToPci *** pcmToPci
@ -137,7 +138,7 @@ getTableDiff oldtm newtm =

getTableChangeDeps
:: (QErrM m, CacheRWM m)
=> TableInfo -> TableDiff -> m [SchemaObjId]
=> TableInfo PGColInfo -> TableDiff -> m [SchemaObjId]
getTableChangeDeps ti tableDiff = do
sc <- askSchemaCache
-- for all the dropped columns
@ -150,7 +151,7 @@ getTableChangeDeps ti tableDiff = do
return $ getDependentObjs sc objId
return $ droppedConsDeps <> droppedColDeps
where
tn = tiName ti
tn = _tiName ti
TableDiff _ droppedCols _ _ droppedFKeyConstraints _ = tableDiff

data SchemaDiff

server/src-lib/Hasura/RQL/DDL/Schema/Enum.hs (new file, 135 lines)
@ -0,0 +1,135 @@
-- | Types and functions for interacting with and manipulating SQL enums represented by
-- /single-column tables/, __not__ native Postgres enum types. Native enum types in Postgres are
-- difficult to change, so we discourage their use, but we might add support for native enum types
-- in the future.
module Hasura.RQL.DDL.Schema.Enum (
-- * Re-exports from "Hasura.RQL.Types.Column"
EnumReference(..)
, EnumValues
, EnumValueInfo(..)
, EnumValue(..)

-- * Loading enum values
, fetchAndValidateEnumValues
) where

import Hasura.Prelude

import Control.Monad.Validate
import Data.List (delete)

import qualified Data.HashMap.Strict as M
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G

import Hasura.Db
import Hasura.GraphQL.Utils
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Error
import Hasura.SQL.Types

import qualified Hasura.SQL.DML as S

data EnumTableIntegrityError
= EnumTableMissingPrimaryKey
| EnumTableMultiColumnPrimaryKey ![PGCol]
| EnumTableNonTextualPrimaryKey !PGRawColInfo
| EnumTableNoEnumValues
| EnumTableInvalidEnumValueNames !(NE.NonEmpty T.Text)
| EnumTableNonTextualCommentColumn !PGRawColInfo
| EnumTableTooManyColumns ![PGCol]
deriving (Show, Eq)

fetchAndValidateEnumValues
:: (MonadTx m)
=> QualifiedTable
-> [PGRawColInfo]
-> [PGRawColInfo]
-> m EnumValues
fetchAndValidateEnumValues tableName primaryKeyColumns columnInfos =
either (throw400 ConstraintViolation . showErrors) pure =<< runValidateT fetchAndValidate
where
fetchAndValidate :: (MonadTx m, MonadValidate [EnumTableIntegrityError] m) => m EnumValues
fetchAndValidate = do
maybePrimaryKey <- tolerate validatePrimaryKey
maybeCommentColumn <- validateColumns maybePrimaryKey
enumValues <- maybe (refute mempty) (fetchEnumValues maybeCommentColumn) maybePrimaryKey
validateEnumValues enumValues
pure enumValues
where
validatePrimaryKey = case primaryKeyColumns of
[] -> refute [EnumTableMissingPrimaryKey]
[column] -> case prciType column of
PGText -> pure column
_ -> refute [EnumTableNonTextualPrimaryKey column]
_ -> refute [EnumTableMultiColumnPrimaryKey $ map prciName primaryKeyColumns]

validateColumns primaryKeyColumn = do
let nonPrimaryKeyColumns = maybe columnInfos (`delete` columnInfos) primaryKeyColumn
case nonPrimaryKeyColumns of
[] -> pure Nothing
[column] -> case prciType column of
PGText -> pure $ Just column
_ -> dispute [EnumTableNonTextualCommentColumn column] $> Nothing
columns -> dispute [EnumTableTooManyColumns $ map prciName columns] $> Nothing

fetchEnumValues maybeCommentColumn primaryKeyColumn = do
let nullExtr = S.Extractor S.SENull Nothing
commentExtr = maybe nullExtr (S.mkExtr . prciName) maybeCommentColumn
query = Q.fromBuilder $ toSQL S.mkSelect
{ S.selFrom = Just $ S.mkSimpleFromExp tableName
, S.selExtr = [S.mkExtr (prciName primaryKeyColumn), commentExtr] }
fmap mkEnumValues . liftTx $ Q.withQE defaultTxErrorHandler query () True

mkEnumValues rows = M.fromList . flip map rows $ \(key, comment) ->
(EnumValue key, EnumValueInfo comment)

validateEnumValues enumValues = do
let enumValueNames = map (G.Name . getEnumValue) (M.keys enumValues)
when (null enumValueNames) $
refute [EnumTableNoEnumValues]
let badNames = map G.unName $ filter (not . isValidEnumName) enumValueNames
for_ (NE.nonEmpty badNames) $ \someBadNames ->
refute [EnumTableInvalidEnumValueNames someBadNames]

-- https://graphql.github.io/graphql-spec/June2018/#EnumValue
isValidEnumName name =
isValidName name && name `notElem` ["true", "false", "null"]

showErrors :: [EnumTableIntegrityError] -> T.Text
showErrors allErrors =
"the table " <> tableName <<> " cannot be used as an enum " <> reasonsMessage
where
reasonsMessage = case allErrors of
[singleError] -> "because " <> showOne singleError
_ -> "for the following reasons:\n" <> T.unlines
(map ((" • " <>) . showOne) allErrors)

showOne :: EnumTableIntegrityError -> T.Text
showOne = \case
EnumTableMissingPrimaryKey -> "the table must have a primary key"
EnumTableMultiColumnPrimaryKey cols ->
"the table’s primary key must not span multiple columns ("
<> T.intercalate ", " (map dquoteTxt $ sort cols) <> ")"
EnumTableNonTextualPrimaryKey colInfo -> typeMismatch "primary key" colInfo PGText
EnumTableNoEnumValues -> "the table must have at least one row"
EnumTableInvalidEnumValueNames values ->
let pluralString = " are not valid GraphQL enum value names"
valuesString = case NE.reverse (NE.sort values) of
value NE.:| [] -> "value " <> value <<> " is not a valid GraphQL enum value name"
value2 NE.:| [value1] -> "values " <> value1 <<> " and " <> value2 <<> pluralString
lastValue NE.:| otherValues ->
"values " <> T.intercalate ", " (map dquoteTxt $ reverse otherValues) <> ", and "
<> lastValue <<> pluralString
in "the " <> valuesString
EnumTableNonTextualCommentColumn colInfo -> typeMismatch "comment column" colInfo PGText
EnumTableTooManyColumns cols ->
"the table must have exactly one primary key and optionally one comment column, not "
<> T.pack (show $ length cols) <> " columns ("
<> T.intercalate ", " (map dquoteTxt $ sort cols) <> ")"
where
typeMismatch description colInfo expected =
"the table’s " <> description <> " (" <> prciName colInfo <<> ") must have type "
<> expected <<> ", not type " <>> prciType colInfo
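
fetchAndValidateEnumValues leans on monad-validate's refute, dispute, and tolerate to accumulate every integrity error before reporting, instead of stopping at the first failure. A self-contained sketch of that accumulation pattern, assuming the Control.Monad.Validate API imported above (the user-record checks are invented for illustration):

import Control.Monad (when)
import Control.Monad.Validate (Validate, dispute, refute, runValidate, tolerate)
import Data.Maybe (fromMaybe)

data Error = EmptyName | NegativeAge | MissingEmail deriving Show

-- Independent checks accumulate into one error list, mirroring the shape of
-- fetchAndValidate: tolerate turns a refuted check into Nothing so later
-- checks still run; dispute records an error without aborting; refute aborts.
validateUser :: String -> Int -> Maybe String -> Validate [Error] (String, Int, String)
validateUser name age maybeEmail = do
  checkedName <- tolerate $ do
    when (null name) $ refute [EmptyName]
    pure name
  when (age < 0) $ dispute [NegativeAge]
  email <- maybe (refute [MissingEmail]) pure maybeEmail
  pure (fromMaybe "<unnamed>" checkedName, age, email)

main :: IO ()
main = do
  print $ runValidate (validateUser "alice" 30 (Just "alice@example.com"))
  -- Right ("alice",30,"alice@example.com")
  print $ runValidate (validateUser "" (-1) Nothing)
  -- Left [EmptyName,NegativeAge,MissingEmail]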
@ -1,3 +1,6 @@
-- | Functions for mutating the catalog (with integrity checking) to incorporate schema changes
-- discovered after applying a user-supplied SQL query. None of these functions modify the schema
-- cache, so it must be reloaded after the catalog is updated.
module Hasura.RQL.DDL.Schema.Rename
( renameTableInCatalog
, renameColInCatalog
@ -70,7 +73,7 @@ renameTableInCatalog newQT oldQT = do

renameColInCatalog
:: (MonadTx m, CacheRM m)
=> PGCol -> PGCol -> QualifiedTable -> TableInfo -> m ()
=> PGCol -> PGCol -> QualifiedTable -> TableInfo PGColInfo -> m ()
renameColInCatalog oCol nCol qt ti = do
sc <- askSchemaCache
-- Check if any relation exists with new column name
@ -90,7 +93,7 @@ renameColInCatalog oCol nCol qt ti = do
where
errMsg = "cannot rename column " <> oCol <<> " to " <>> nCol
assertFldNotExists =
case M.lookup (fromPGCol oCol) $ tiFieldInfoMap ti of
case M.lookup (fromPGCol oCol) $ _tiFieldInfoMap ti of
Just (FIRelationship _) ->
throw400 AlreadyExists $ "cannot rename column " <> oCol
<<> " to " <> nCol <<> " in table " <> qt <<>

@ -16,6 +16,7 @@ import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Relationship
import Hasura.RQL.DDL.RemoteSchema
import Hasura.RQL.DDL.Schema.Diff
import Hasura.RQL.DDL.Schema.Enum
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.DDL.Schema.Rename
import Hasura.RQL.DDL.Utils
@ -28,6 +29,7 @@ import Hasura.SQL.Types
import qualified Database.PG.Query as Q
import qualified Hasura.GraphQL.Schema as GS

import Control.Lens.Extended hiding ((.=))
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
@ -48,23 +50,52 @@ delTableFromCatalog (QualifiedObject sn tn) =
WHERE table_schema = $1 AND table_name = $2
|] (sn, tn) False

saveTableToCatalog :: QualifiedTable -> Q.Tx ()
saveTableToCatalog (QualifiedObject sn tn) =
saveTableToCatalog :: TrackTable -> Q.Tx ()
saveTableToCatalog (TrackTable (QualifiedObject sn tn) isEnum) =
Q.unitQ [Q.sql|
INSERT INTO "hdb_catalog"."hdb_table" VALUES ($1, $2)
|] (sn, tn) False
INSERT INTO "hdb_catalog"."hdb_table" (table_schema, table_name, is_enum)
VALUES ($1, $2, $3)
|] (sn, tn, isEnum) False

newtype TrackTable
data TrackTable
= TrackTable
{ tName :: QualifiedTable }
deriving (Show, Eq, FromJSON, ToJSON, Lift)
{ tName :: !QualifiedTable
, tIsEnum :: !Bool
} deriving (Show, Eq, Lift)

instance FromJSON TrackTable where
parseJSON v = withOptions <|> withoutOptions
where
withOptions = flip (withObject "TrackTable") v $ \o -> TrackTable
<$> o .: "table"
<*> o .:? "is_enum" .!= False
withoutOptions = TrackTable <$> parseJSON v <*> pure False

instance ToJSON TrackTable where
toJSON (TrackTable name isEnum)
| isEnum = object [ "table" .= name, "is_enum" .= isEnum ]
| otherwise = toJSON name
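
The FromJSON instance above keeps the old wire format working: the payload may be either a bare table name or an object carrying an is_enum flag. The same alternative-parser pattern as a runnable sketch, with Text standing in for QualifiedTable and "colors" as a hypothetical table:

{-# LANGUAGE OverloadedStrings #-}
import Control.Applicative ((<|>))
import Data.Aeson (FromJSON (..), decode, withObject, (.:), (.:?), (.!=))
import Data.Text (Text)

data TrackTable = TrackTable { tName :: Text, tIsEnum :: Bool } deriving Show

instance FromJSON TrackTable where
  parseJSON v = withOptions <|> withoutOptions
    where
      -- new form: {"table": "colors", "is_enum": true}
      withOptions = flip (withObject "TrackTable") v $ \o ->
        TrackTable <$> o .: "table" <*> o .:? "is_enum" .!= False
      -- old form: just the table name, defaulting is_enum to false
      withoutOptions = TrackTable <$> parseJSON v <*> pure False

main :: IO ()
main = do
  print (decode "\"colors\"" :: Maybe TrackTable)
  print (decode "{\"table\": \"colors\", \"is_enum\": true}" :: Maybe TrackTable)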
data SetTableIsEnum
  = SetTableIsEnum
  { stieTable :: !QualifiedTable
  , stieIsEnum :: !Bool
  } deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 4 snakeCase) ''SetTableIsEnum)

data UntrackTable =
  UntrackTable
  { utTable :: !QualifiedTable
  , utCascade :: !(Maybe Bool)
  } deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable)

-- | Track table/view, Phase 1:
-- Validate table tracking operation. Fails if table is already being tracked,
-- or if a function with the same name is being tracked.
trackExistingTableOrViewP1
  :: (CacheRM m, UserInfoM m, QErrM m) => TrackTable -> m ()
trackExistingTableOrViewP1 (TrackTable vn) = do
trackExistingTableOrViewP1 TrackTable { tName = vn } = do
  adminOnly
  rawSchemaCache <- askSchemaCache
  when (M.member vn $ scTables rawSchemaCache) $

@ -74,42 +105,39 @@ trackExistingTableOrViewP1 (TrackTable vn) = do
    throw400 NotSupported $ "function with name " <> vn <<> " already exists"

trackExistingTableOrViewP2
  :: (QErrM m, CacheRWM m, MonadTx m)
  => QualifiedTable -> Bool -> m EncJSON
trackExistingTableOrViewP2 vn isSystemDefined = do
  :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
  => TrackTable -> m EncJSON
trackExistingTableOrViewP2 query@TrackTable { tName = tableName } = do
  sc <- askSchemaCache
  let defGCtx = scDefaultRemoteGCtx sc
  GS.checkConflictingNode defGCtx $ GS.qualObjectToName vn
  GS.checkConflictingNode defGCtx $ GS.qualObjectToName tableName

  tables <- liftTx fetchTableCatalog
  case tables of
    [] -> throw400 NotExists $ "no such table/view exists in postgres : " <>> vn
    [ti] -> addTableToCache ti
    _ -> throw500 $ "more than one row found for: " <>> vn
  liftTx $ Q.catchE defaultTxErrorHandler $ saveTableToCatalog vn
  liftTx $ Q.catchE defaultTxErrorHandler $ saveTableToCatalog query
  buildSchemaCacheFor (MOTable tableName)

  return successMsg
  where
    QualifiedObject sn tn = vn
    mkTableInfo (cols, pCols, constraints, viewInfoM) =
      let colMap = M.fromList $ flip map (Q.getAltJ cols) $
            \c -> (fromPGCol $ pgiName c, FIColumn c)
      in TableInfo vn isSystemDefined colMap mempty (Q.getAltJ constraints)
         (Q.getAltJ pCols) (Q.getAltJ viewInfoM) mempty
    fetchTableCatalog = map mkTableInfo <$>
      Q.listQE defaultTxErrorHandler [Q.sql|
        SELECT columns, primary_key_columns,
               constraints, view_info
        FROM hdb_catalog.hdb_table_info_agg
        WHERE table_schema = $1 AND table_name = $2
      |] (sn, tn) True

runTrackTableQ
  :: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m)
  :: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
  => TrackTable -> m EncJSON
runTrackTableQ q = do
  trackExistingTableOrViewP1 q
  trackExistingTableOrViewP2 (tName q) False
  trackExistingTableOrViewP2 q

runSetExistingTableIsEnumQ
  :: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
  => SetTableIsEnum -> m EncJSON
runSetExistingTableIsEnumQ (SetTableIsEnum tableName isEnum) = do
  adminOnly
  void $ askTabInfo tableName -- assert that table is tracked

  liftTx $ Q.unitQE defaultTxErrorHandler [Q.sql|
    UPDATE "hdb_catalog"."hdb_table" SET is_enum = $3
    WHERE table_schema = $1 AND table_name = $2
  |] (qSchema tableName, qName tableName, isEnum) False

  buildSchemaCacheFor (MOTable tableName)
  return successMsg
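
A note on the derived instances: aesonDrop 4 snakeCase strips the 4-character "stie" record prefix and snake-cases the remainder, so SetTableIsEnum travels over the wire as {"table": ..., "is_enum": ...}. A standalone sketch of that derivation with a hypothetical demo type (here a 3-character prefix is dropped):

{-# LANGUAGE TemplateHaskell #-}
import Data.Aeson.Casing (aesonDrop, snakeCase)
import Data.Aeson.TH (deriveJSON)

-- illustrative stand-in for SetTableIsEnum; the "sed" prefix is dropped and
-- the rest snake-cased, giving the JSON keys "table" and "is_enum"
data SetEnumDemo = SetEnumDemo
  { sedTable  :: String
  , sedIsEnum :: Bool
  } deriving (Show)
$(deriveJSON (aesonDrop 3 snakeCase) ''SetEnumDemo)

-- encode (SetEnumDemo "colors" True) == "{\"table\":\"colors\",\"is_enum\":true}"
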
purgeDep :: (CacheRWM m, MonadTx m)
         => SchemaObjId -> m ()

@ -133,134 +161,6 @@ purgeDep schemaObjId = case schemaObjId of
  _ -> throw500 $
    "unexpected dependent object : " <> reportSchemaObj schemaObjId

processTableChanges :: (MonadTx m, CacheRWM m)
                    => TableInfo -> TableDiff -> m Bool
processTableChanges ti tableDiff = do
  -- If table rename occurs then don't replace constraints and
  -- process dropped/added columns, because schema reload happens eventually
  sc <- askSchemaCache
  let tn = tiName ti
      withOldTabName = do
        -- replace constraints
        replaceConstraints tn
        -- for all the dropped columns
        procDroppedCols tn
        -- for all added columns
        procAddedCols tn
        -- for all altered columns
        procAlteredCols sc tn

      withNewTabName newTN = do
        let tnGQL = GS.qualObjectToName newTN
            defGCtx = scDefaultRemoteGCtx sc
        -- check for GraphQL schema conflicts on new name
        GS.checkConflictingNode defGCtx tnGQL
        void $ procAlteredCols sc tn
        -- update new table in catalog
        renameTableInCatalog newTN tn
        return True

  maybe withOldTabName withNewTabName mNewName

  where
    TableDiff mNewName droppedCols addedCols alteredCols _ constraints = tableDiff
    replaceConstraints tn = flip modTableInCache tn $ \tInfo ->
      return $ tInfo {tiUniqOrPrimConstraints = constraints}

    procDroppedCols tn =
      forM_ droppedCols $ \droppedCol ->
        -- Drop the column from the cache
        delColFromCache droppedCol tn

    procAddedCols tn =
      -- In the newly added columns check that there is no conflict with relationships
      forM_ addedCols $ \pci@(PGColInfo colName _ _) ->
        case M.lookup (fromPGCol colName) $ tiFieldInfoMap ti of
          Just (FIRelationship _) ->
            throw400 AlreadyExists $ "cannot add column " <> colName
            <<> " in table " <> tn <<>
            " as a relationship with the name already exists"
          _ -> addColToCache colName pci tn

    procAlteredCols sc tn = fmap or $ forM alteredCols $
      \( PGColInfo oColName oColTy oNullable
       , npci@(PGColInfo nColName nColTy nNullable)
       ) ->
        if | oColName /= nColName -> do
               renameColInCatalog oColName nColName tn ti
               return True

           | oColTy /= nColTy -> do
               let colId = SOTableObj tn $ TOCol oColName
                   typeDepObjs = getDependentObjsWith (== DROnType) sc colId

               -- Raise exception if any objects found which are dependant on column type
               unless (null typeDepObjs) $ throw400 DependencyError $
                 "cannot change type of column " <> oColName <<> " in table "
                 <> tn <<> " because of the following dependencies : " <>
                 reportSchemaObjs typeDepObjs

               -- Update column type in cache
               updColInCache nColName npci tn

               -- If any dependant permissions found with the column whose type
               -- being altered is provided with a session variable,
               -- then rebuild permission info and update the cache
               let sessVarDepObjs =
                     getDependentObjsWith (== DRSessionVariable) sc colId
               forM_ sessVarDepObjs $ \objId ->
                 case objId of
                   SOTableObj qt (TOPerm rn pt) -> rebuildPermInfo qt rn pt
                   _ -> throw500
                     "unexpected schema dependency found for altering column type"
               return False

           | oNullable /= nNullable -> do
               updColInCache nColName npci tn
               return False

           | otherwise -> return False

delTableAndDirectDeps
  :: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
  liftTx $ Q.catchE defaultTxErrorHandler $ do
    Q.unitQ [Q.sql|
      DELETE FROM "hdb_catalog"."hdb_relationship"
      WHERE table_schema = $1 AND table_name = $2
    |] (sn, tn) False
    Q.unitQ [Q.sql|
      DELETE FROM "hdb_catalog"."hdb_permission"
      WHERE table_schema = $1 AND table_name = $2
    |] (sn, tn) False
    Q.unitQ [Q.sql|
      DELETE FROM "hdb_catalog"."event_triggers"
      WHERE schema_name = $1 AND table_name = $2
    |] (sn, tn) False
  delTableFromCatalog qtn
  delTableFromCache qtn

processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool
processSchemaChanges schemaDiff = do
  -- Purge the dropped tables
  mapM_ delTableAndDirectDeps droppedTables

  sc <- askSchemaCache
  fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do
    ti <- case M.lookup oldQtn $ scTables sc of
      Just ti -> return ti
      Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
    processTableChanges ti tableDiff
  where
    SchemaDiff droppedTables alteredTables = schemaDiff

data UntrackTable =
  UntrackTable
  { utTable :: !QualifiedTable
  , utCascade :: !(Maybe Bool)
  } deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UntrackTable)

unTrackExistingTableOrViewP1
  :: (CacheRM m, UserInfoM m, QErrM m) => UntrackTable -> m ()
unTrackExistingTableOrViewP1 (UntrackTable vn _) = do

@ -269,7 +169,7 @@ unTrackExistingTableOrViewP1 (UntrackTable vn _) = do
  case M.lookup vn (scTables rawSchemaCache) of
    Just ti ->
      -- Check if table/view is system defined
      when (tiSystemDefined ti) $ throw400 NotSupported $
      when (_tiSystemDefined ti) $ throw400 NotSupported $
        vn <<> " is system defined, cannot untrack"
    Nothing -> throw400 AlreadyUntracked $
      "view/table already untracked : " <>> vn

@ -306,17 +206,220 @@ runUntrackTableQ q = do
  unTrackExistingTableOrViewP1 q
  unTrackExistingTableOrViewP2 q

handleInconsistentObj
processTableChanges :: (MonadTx m, CacheRWM m)
                    => TableInfo PGColInfo -> TableDiff -> m Bool
processTableChanges ti tableDiff = do
  -- If table rename occurs then don't replace constraints and
  -- process dropped/added columns, because schema reload happens eventually
  sc <- askSchemaCache
  let tn = _tiName ti
      withOldTabName = do
        replaceConstraints tn
        procDroppedCols tn
        procAddedCols tn
        procAlteredCols sc tn

      withNewTabName newTN = do
        let tnGQL = GS.qualObjectToName newTN
            defGCtx = scDefaultRemoteGCtx sc
        -- check for GraphQL schema conflicts on new name
        GS.checkConflictingNode defGCtx tnGQL
        void $ procAlteredCols sc tn
        -- update new table in catalog
        renameTableInCatalog newTN tn
        return True

  maybe withOldTabName withNewTabName mNewName

  where
    TableDiff mNewName droppedCols addedCols alteredCols _ constraints = tableDiff
    replaceConstraints tn = flip modTableInCache tn $ \tInfo ->
      return $ tInfo {_tiUniqOrPrimConstraints = constraints}

    procDroppedCols tn =
      forM_ droppedCols $ \droppedCol ->
        -- Drop the column from the cache
        delColFromCache droppedCol tn

    procAddedCols tn =
      -- In the newly added columns check that there is no conflict with relationships
      forM_ addedCols $ \rawInfo@(PGRawColInfo colName _ _ _) ->
        case M.lookup (fromPGCol colName) $ _tiFieldInfoMap ti of
          Just (FIRelationship _) ->
            throw400 AlreadyExists $ "cannot add column " <> colName
            <<> " in table " <> tn <<>
            " as a relationship with the name already exists"
          _ -> do
            info <- processColumnInfoUsingCache tn rawInfo
            addColToCache colName info tn

    procAlteredCols sc tn = fmap or $ forM alteredCols $
      \( PGRawColInfo oldName oldType _ _
       , newRawInfo@(PGRawColInfo newName newType _ _) ) -> do
        let performColumnUpdate = do
              newInfo <- processColumnInfoUsingCache tn newRawInfo
              updColInCache newName newInfo tn

        if | oldName /= newName -> renameColInCatalog oldName newName tn ti $> True

           | oldType /= newType -> do
               let colId = SOTableObj tn $ TOCol oldName
                   typeDepObjs = getDependentObjsWith (== DROnType) sc colId

               unless (null typeDepObjs) $ throw400 DependencyError $
                 "cannot change type of column " <> oldName <<> " in table "
                 <> tn <<> " because of the following dependencies : " <>
                 reportSchemaObjs typeDepObjs

               performColumnUpdate

               -- If any dependent permissions found with the column whose type being altered is
               -- provided with a session variable, then rebuild permission info and update the cache
               let sessVarDepObjs = getDependentObjsWith (== DRSessionVariable) sc colId
               forM_ sessVarDepObjs $ \case
                 SOTableObj qt (TOPerm rn pt) -> rebuildPermInfo qt rn pt
                 _ -> throw500 "unexpected schema dependency found for altering column type"
               pure False

           | otherwise -> performColumnUpdate $> False

delTableAndDirectDeps
  :: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
  liftTx $ Q.catchE defaultTxErrorHandler $ do
    Q.unitQ [Q.sql|
      DELETE FROM "hdb_catalog"."hdb_relationship"
      WHERE table_schema = $1 AND table_name = $2
    |] (sn, tn) False
    Q.unitQ [Q.sql|
      DELETE FROM "hdb_catalog"."hdb_permission"
      WHERE table_schema = $1 AND table_name = $2
    |] (sn, tn) False
    Q.unitQ [Q.sql|
      DELETE FROM "hdb_catalog"."event_triggers"
      WHERE schema_name = $1 AND table_name = $2
    |] (sn, tn) False
  delTableFromCatalog qtn
  delTableFromCache qtn

processSchemaChanges :: (MonadTx m, CacheRWM m) => SchemaDiff -> m Bool
processSchemaChanges schemaDiff = do
  -- Purge the dropped tables
  mapM_ delTableAndDirectDeps droppedTables

  sc <- askSchemaCache
  fmap or $ forM alteredTables $ \(oldQtn, tableDiff) -> do
    ti <- case M.lookup oldQtn $ scTables sc of
      Just ti -> return ti
      Nothing -> throw500 $ "old table metadata not found in cache : " <>> oldQtn
    processTableChanges ti tableDiff
  where
    SchemaDiff droppedTables alteredTables = schemaDiff

-- | Builds an initial @'TableCache' 'PGColInfo'@ from catalog information. Does not fill in
-- '_tiRolePermInfoMap' or '_tiEventTriggerInfoMap' at all, and '_tiFieldInfoMap' only contains
-- columns, not relationships; those pieces of information are filled in by later stages.
buildTableCache
  :: forall m. (MonadTx m, CacheRWM m)
  => [CatalogTable] -> m (TableCache PGColInfo)
buildTableCache = processTableCache <=< buildRawTableCache
  where
    withTable name = withSchemaObject $
      InconsistentMetadataObj (MOTable name) MOTTable (toJSON name)

    -- Step 1: Build the raw table cache from metadata information.
    buildRawTableCache :: [CatalogTable] -> m (TableCache PGRawColInfo)
    buildRawTableCache catalogTables = fmap (M.fromList . catMaybes) . for catalogTables $
      \(CatalogTable name isSystemDefined isEnum maybeInfo) -> withTable name $ do
        catalogInfo <- onNothing maybeInfo $
          throw400 NotExists $ "no such table/view exists in postgres: " <>> name

        let CatalogTableInfo columns constraints primaryKeyColumnNames viewInfo = catalogInfo
            columnFields = M.fromList . flip map columns $ \column ->
              (fromPGCol $ prciName column, FIColumn column)

            primaryKeyColumns = flip filter columns $ \column ->
              prciName column `elem` primaryKeyColumnNames
            fetchEnumValues = fetchAndValidateEnumValues name primaryKeyColumns columns

        maybeEnumValues <- if isEnum then Just <$> fetchEnumValues else pure Nothing

        let info = TableInfo
              { _tiName = name
              , _tiSystemDefined = isSystemDefined
              , _tiFieldInfoMap = columnFields
              , _tiRolePermInfoMap = mempty
              , _tiUniqOrPrimConstraints = constraints
              , _tiPrimaryKeyCols = primaryKeyColumnNames
              , _tiViewInfo = viewInfo
              , _tiEventTriggerInfoMap = mempty
              , _tiEnumValues = maybeEnumValues }
        pure (name, info)

    -- Step 2: Process the raw table cache to replace Postgres column types with logical column
    -- types.
    processTableCache :: TableCache PGRawColInfo -> m (TableCache PGColInfo)
    processTableCache rawTables = fmap (M.mapMaybe id) . for rawTables $ \rawInfo -> do
      let tableName = _tiName rawInfo
      withTable tableName $ rawInfo
        & tiFieldInfoMap.traverse._FIColumn %%~ processColumnInfo enumTables tableName
      where
        enumTables = M.mapMaybe _tiEnumValues rawTables

-- | “Processes” a 'PGRawColInfo' into a 'PGColInfo' by resolving its type using a map of known
-- enum tables.
processColumnInfo
  :: (QErrM m)
  => M.HashMap QualifiedTable EnumValues -- ^ known enum tables
  -> QualifiedTable -- ^ the table this column belongs to
  -> PGRawColInfo -- ^ the column’s raw information
  -> m PGColInfo
processColumnInfo enumTables tableName rawInfo = do
  resolvedType <- resolveColumnType
  pure PGColInfo
    { pgiName = prciName rawInfo
    , pgiType = resolvedType
    , pgiIsNullable = prciIsNullable rawInfo }
  where
    resolveColumnType =
      case prciReferences rawInfo of
        -- no referenced tables? definitely not an enum
        [] -> pure $ PGColumnScalar (prciType rawInfo)

        -- one referenced table? might be an enum, so check if the referenced table is an enum
        [referencedTableName] -> pure $ M.lookup referencedTableName enumTables & maybe
          (PGColumnScalar $ prciType rawInfo)
          (PGColumnEnumReference . EnumReference referencedTableName)

        -- multiple referenced tables? we could check if any of them are enums, but the schema is
        -- strange, so let’s just reject it
        referencedTables -> throw400 ConstraintViolation
          $ "cannot handle exotic schema: column " <> prciName rawInfo <<> " in table "
          <> tableName <<> " references multiple foreign tables ("
          <> T.intercalate ", " (map dquote referencedTables) <> ")?"
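
The resolution rule above is small enough to restate standalone: a column becomes an enum reference only when it has exactly one foreign key and that key points at a known enum table. A self-contained sketch under simplified types; all names below are illustrative, not the real Hasura definitions:

import qualified Data.Map.Strict as M

data ColumnType
  = Scalar String   -- ordinary Postgres scalar, e.g. "text"
  | EnumRef String  -- reference to an enum table
  deriving (Show, Eq)

resolveColumnType
  :: M.Map String [String] -- known enum tables and their values
  -> String                -- the column's raw Postgres type
  -> [String]              -- tables this column references via foreign keys
  -> Either String ColumnType
resolveColumnType enumTables rawType refs = case refs of
  -- no referenced tables: definitely not an enum
  []    -> Right (Scalar rawType)
  -- one referenced table: an enum reference iff that table is a known enum
  [ref] -> Right $ if M.member ref enumTables then EnumRef ref else Scalar rawType
  -- multiple referenced tables: reject, as the diff does
  _     -> Left "column references multiple foreign tables"
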
-- | Like 'processColumnInfo', but uses the information in the current schema cache to resolve a
-- column’s type.
processColumnInfoUsingCache :: (CacheRM m, QErrM m) => QualifiedTable -> PGRawColInfo -> m PGColInfo
processColumnInfoUsingCache tableName rawInfo = do
  tables <- scTables <$> askSchemaCache
  processColumnInfo (M.mapMaybe _tiEnumValues tables) tableName rawInfo

withSchemaObject
  :: (QErrM m, CacheRWM m)
  => (T.Text -> InconsistentMetadataObj)
  -> m ()
  -> m ()
handleInconsistentObj f action =
  action `catchError` \err -> do
  -> m a
  -> m (Maybe a)
withSchemaObject f action =
  (Just <$> action) `catchError` \err -> do
    sc <- askSchemaCache
    let inconsObj = f $ qeError err
        allInconsObjs = inconsObj:scInconsistentObjs sc
    writeSchemaCache $ sc{scInconsistentObjs = allInconsObjs}
    writeSchemaCache sc { scInconsistentObjs = allInconsObjs }
    pure Nothing

withSchemaObject_ :: (QErrM m, CacheRWM m) => (T.Text -> InconsistentMetadataObj) -> m () -> m ()
withSchemaObject_ f = void . withSchemaObject f
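
withSchemaObject converts a failure into a recorded inconsistency instead of aborting the whole cache rebuild. A minimal mtl sketch of that error-to-state pattern, with simplified stand-in types:

import Control.Monad.Except (ExceptT, catchError)
import Control.Monad.State (State, modify)

-- run an action; on failure, record the error as an "inconsistent object"
-- and carry on with Nothing instead of rethrowing
withObject'
  :: (String -> obj)
  -> ExceptT String (State [obj]) a
  -> ExceptT String (State [obj]) (Maybe a)
withObject' mkObj action =
  (Just <$> action) `catchError` \err -> do
    modify (mkObj err :)
    pure Nothing
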
checkNewInconsistentMeta
  :: (QErrM m)

@ -344,6 +447,26 @@ buildSchemaCacheStrict = do
    let err = err400 Unexpected "cannot continue due to inconsistent metadata"
    throwError err{qeInternal = Just $ toJSON inconsObjs}

-- | Rebuilds the schema cache. If an object with the given object id became newly inconsistent,
-- raises an error about it specifically. Otherwise, raises a generic metadata inconsistency error.
buildSchemaCacheFor
  :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
  => MetadataObjId -> m ()
buildSchemaCacheFor objectId = do
  oldSchemaCache <- askSchemaCache
  buildSchemaCache
  newSchemaCache <- askSchemaCache

  let diffInconsistentObjects = getDifference _moId `on` scInconsistentObjs
      newInconsistentObjects = newSchemaCache `diffInconsistentObjects` oldSchemaCache

  for_ (find ((== objectId) . _moId) newInconsistentObjects) $ \matchingObject ->
    throw400 ConstraintViolation (_moReason matchingObject)

  unless (null newInconsistentObjects) $
    throwError (err400 Unexpected "cannot continue due to new inconsistent metadata")
      { qeInternal = Just $ toJSON newInconsistentObjects }
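
The diffing step above reports only objects that are inconsistent after the rebuild but were not before, so pre-existing problems do not mask the object the caller just touched. A tiny standalone sketch of the check, with plain lists standing in for the schema cache:

import Data.List (find)

-- ids inconsistent now but not before the rebuild
newlyInconsistent :: Eq a => [a] -> [a] -> [a]
newlyInconsistent old new = filter (`notElem` old) new

-- report the object the caller touched only if it just became inconsistent
checkFor :: Eq a => a -> [a] -> [a] -> Maybe a
checkFor objId old new = find (== objId) (newlyInconsistent old new)
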
buildSchemaCache
  :: (MonadTx m, CacheRWM m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
  => m ()

@ -372,17 +495,7 @@ buildSchemaCacheG withSetup = do
  let fkeys = HS.fromList fkeys'

  -- tables
  forM_ tables $ \ct -> do
    let qt = _ctTable ct
        isSysDef = _ctSystemDefined ct
        tableInfoM = _ctInfo ct
        mkInconsObj = InconsistentMetadataObj (MOTable qt)
                      MOTTable $ toJSON $ TrackTable qt
    modifyErr (\e -> "table " <> qt <<> "; " <> e) $
      handleInconsistentObj mkInconsObj $ do
        ti <- onNothing tableInfoM $ throw400 NotExists $
          "no such table/view exists in postgres : " <>> qt
        addTableToCache $ ti{tiSystemDefined = isSysDef}
  modTableCache =<< buildTableCache tables

  -- relationships
  forM_ relationships $ \(CatalogRelation qt rn rt rDef cmnt) -> do

@ -390,7 +503,7 @@ buildSchemaCacheG withSetup = do
        def = toJSON $ WithTable qt $ RelDef rn rDef cmnt
        mkInconsObj = InconsistentMetadataObj objId (MOTRel rt) def
    modifyErr (\e -> "table " <> qt <<> "; rel " <> rn <<> "; " <> e) $
      handleInconsistentObj mkInconsObj $
      withSchemaObject_ mkInconsObj $
        case rt of
          ObjRel -> do
            using <- decodeValue rDef

@ -409,7 +522,7 @@ buildSchemaCacheG withSetup = do
        def = toJSON $ WithTable qt $ PermDef rn pDef cmnt
        mkInconsObj = InconsistentMetadataObj objId (MOTPerm pt) def
    modifyErr (\e -> "table " <> qt <<> "; role " <> rn <<> "; " <> e) $
      handleInconsistentObj mkInconsObj $
      withSchemaObject_ mkInconsObj $
        case pt of
          PTInsert -> permHelper withSetup sqlGenCtx qt rn pDef PAInsert
          PTSelect -> permHelper withSetup sqlGenCtx qt rn pDef PASelect

@ -421,10 +534,10 @@ buildSchemaCacheG withSetup = do
    let objId = MOTableObj qt $ MTOTrigger trn
        def = object ["table" .= qt, "configuration" .= configuration]
        mkInconsObj = InconsistentMetadataObj objId MOTEventTrigger def
    handleInconsistentObj mkInconsObj $ do
    withSchemaObject_ mkInconsObj $ do
      etc <- decodeValue configuration
      subTableP2Setup qt etc
      allCols <- getCols . tiFieldInfoMap <$> askTabInfo qt
      allCols <- getCols . _tiFieldInfoMap <$> askTabInfo qt
      when withSetup $ liftTx $
        mkAllTriggersQ trn qt allCols (stringifyNum sqlGenCtx) (etcDefinition etc)

@ -434,7 +547,7 @@ buildSchemaCacheG withSetup = do
        mkInconsObj =
          InconsistentMetadataObj (MOFunction qf) MOTFunction def
    modifyErr (\e -> "function " <> qf <<> "; " <> e) $
      handleInconsistentObj mkInconsObj $ do
      withSchemaObject_ mkInconsObj $ do
        rawfi <- onNothing rawfiM $
          throw400 NotExists $ "no such function exists in postgres : " <>> qf
        trackFunctionP2Setup qf rawfi

@ -463,7 +576,7 @@ buildSchemaCacheG withSetup = do
    let AddRemoteSchemaQuery name _ _ = rs
        mkInconsObj = InconsistentMetadataObj (MORemoteSchema name)
                      MOTRemoteSchema (toJSON rs)
    handleInconsistentObj mkInconsObj $ do
    withSchemaObject_ mkInconsObj $ do
      rsCtx <- addRemoteSchemaP2Setup rs
      sc <- askSchemaCache
      let gCtxMap = scGCtxMap sc

@ -475,11 +588,6 @@ buildSchemaCacheG withSetup = do
          , scDefaultRemoteGCtx = mergedDefGCtx
          }

fetchCatalogData :: Q.TxE QErr CatalogMetadata
fetchCatalogData =
  (Q.getAltJ . runIdentity . Q.getRow) <$> Q.withQE defaultTxErrorHandler
  $(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True

data RunSQL
  = RunSQL
  { rSql :: T.Text

@ -517,7 +625,6 @@ execWithMDCheck
  :: (QErrM m, CacheRWM m, MonadTx m, MonadIO m, HasHttpManager m, HasSQLGenCtx m)
  => RunSQL -> m EncJSON
execWithMDCheck (RunSQL t cascade _) = do

  -- Drop hdb_views so no interference is caused to the sql query
  liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews

@ -582,16 +689,16 @@ execWithMDCheck (RunSQL t cascade _) = do
  postSc <- askSchemaCache
  -- recreate the insert permission infra
  forM_ (M.elems $ scTables postSc) $ \ti -> do
    let tn = tiName ti
    forM_ (M.elems $ tiRolePermInfoMap ti) $ \rpi ->
    let tn = _tiName ti
    forM_ (M.elems $ _tiRolePermInfoMap ti) $ \rpi ->
      maybe (return ()) (liftTx . buildInsInfra tn) $ _permIns rpi

  strfyNum <- stringifyNum <$> askSQLGenCtx
  --recreate triggers
  forM_ (M.elems $ scTables postSc) $ \ti -> do
    let tn = tiName ti
        cols = getCols $ tiFieldInfoMap ti
    forM_ (M.toList $ tiEventTriggerInfoMap ti) $ \(trn, eti) -> do
    let tn = _tiName ti
        cols = getCols $ _tiFieldInfoMap ti
    forM_ (M.toList $ _tiEventTriggerInfoMap ti) $ \(trn, eti) -> do
      let fullspec = etiOpsDef eti
      liftTx $ mkAllTriggersQ trn tn cols strfyNum fullspec

@ -63,7 +63,7 @@ mkSQLCount (CountQueryP1 tn (permFltr, mWc) mDistCols) =
validateCountQWith
  :: (UserInfoM m, QErrM m, CacheRM m)
  => SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> CountQuery
  -> m CountQueryP1
validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do

@ -73,7 +73,7 @@ validateCountQWith sessVarBldr prepValBldr (CountQuery qt mDistCols mWhere) = do
  selPerm <- modifyErr (<> selNecessaryMsg) $
    askSelPermInfo tableInfo

  let colInfoMap = tiFieldInfoMap tableInfo
  let colInfoMap = _tiFieldInfoMap tableInfo

  forM_ mDistCols $ \distCols -> do
    let distColAsrns = [ checkSelOnCol selPerm

@ -60,7 +60,7 @@ mkDeleteCTE (AnnDel tn (fltr, wc) _ _) =
validateDeleteQWith
  :: (UserInfoM m, QErrM m, CacheRM m)
  => SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> DeleteQuery
  -> m AnnDel
validateDeleteQWith sessVarBldr prepValBldr

@ -69,7 +69,7 @@ validateDeleteQWith sessVarBldr prepValBldr

  -- If table is view then check if it is deletable
  mutableView tableName viIsDeletable
    (tiViewInfo tableInfo) "deletable"
    (_tiViewInfo tableInfo) "deletable"

  -- Check if the role has delete permissions
  delPerm <- askDelPermInfo tableInfo

@ -81,7 +81,7 @@ validateDeleteQWith sessVarBldr prepValBldr
  selPerm <- modifyErr (<> selNecessaryMsg) $
    askSelPermInfo tableInfo

  let fieldInfoMap = tiFieldInfoMap tableInfo
  let fieldInfoMap = _tiFieldInfoMap tableInfo
      allCols = getCols fieldInfoMap

  -- convert the returning cols into sql returning exp

@ -64,10 +64,10 @@ toSQLConflict conflict = case conflict of

convObj
  :: (UserInfoM m, QErrM m)
  => (PGScalarType -> Value -> m S.SQLExp)
  => (PGColumnType -> Value -> m S.SQLExp)
  -> HM.HashMap PGCol S.SQLExp
  -> HM.HashMap PGCol S.SQLExp
  -> FieldInfoMap
  -> FieldInfoMap PGColInfo
  -> InsObj
  -> m ([PGCol], [S.SQLExp])
convObj prepFn defInsVals setInsVals fieldInfoMap insObj = do

@ -99,7 +99,7 @@ validateInpCols inpCols updColsPerm = forM_ inpCols $ \inpCol ->
buildConflictClause
  :: (UserInfoM m, QErrM m)
  => SessVarBldr m
  -> TableInfo
  -> TableInfo PGColInfo
  -> [PGCol]
  -> OnConflict
  -> m ConflictClauseP1

@ -131,8 +131,8 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
    (Just _, Just _, _) -> throw400 UnexpectedPayload
      "'constraint' and 'constraint_on' cannot be set at a time"
  where
    fieldInfoMap = tiFieldInfoMap tableInfo
    toSQLBool = toSQLBoolExp (S.mkQual $ tiName tableInfo)
    fieldInfoMap = _tiFieldInfoMap tableInfo
    toSQLBool = toSQLBoolExp (S.mkQual $ _tiName tableInfo)

    validateCols c = do
      let targetcols = getPGCols c

@ -140,11 +140,11 @@ buildConflictClause sessVarBldr tableInfo inpCols (OnConflict mTCol mTCons act)
        \pgCol -> askPGType fieldInfoMap pgCol ""

    validateConstraint c = do
      let tableConsNames = tiUniqOrPrimConstraints tableInfo
      let tableConsNames = _tiUniqOrPrimConstraints tableInfo
      withPathK "constraint" $
        unless (c `elem` tableConsNames) $
          throw400 Unexpected $ "constraint " <> getConstraintTxt c
            <<> " for table " <> tiName tableInfo
            <<> " for table " <> _tiName tableInfo
            <<> " does not exist"

    getUpdPerm = do

@ -160,7 +160,7 @@ convInsertQuery
  :: (UserInfoM m, QErrM m, CacheRM m)
  => (Value -> m [InsObj])
  -> SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> InsertQuery
  -> m InsertQueryP1
convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRetCols) = do

@ -172,7 +172,7 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet

  -- If table is view then check if it is insertable
  mutableView tableName viIsInsertable
    (tiViewInfo tableInfo) "insertable"
    (_tiViewInfo tableInfo) "insertable"

  -- Check if the role has insert permissions
  insPerm <- askInsPermInfo tableInfo

@ -180,7 +180,7 @@ convInsertQuery objsParser sessVarBldr prepFn (InsertQuery tableName val oC mRet
  -- Check if all dependent headers are present
  validateHeaders $ ipiRequiredHeaders insPerm

  let fieldInfoMap = tiFieldInfoMap tableInfo
  let fieldInfoMap = _tiFieldInfoMap tableInfo
      setInsVals = ipiSet insPerm

  -- convert the returning cols into sql returning exp

@ -41,13 +41,13 @@ instance UserInfoM DMLP1 where
instance HasSQLGenCtx DMLP1 where
  askSQLGenCtx = DMLP1 $ lift askSQLGenCtx

mkAdminRolePermInfo :: TableInfo -> RolePermInfo
mkAdminRolePermInfo :: TableInfo PGColInfo -> RolePermInfo
mkAdminRolePermInfo ti =
  RolePermInfo (Just i) (Just s) (Just u) (Just d)
  where
    pgCols = map pgiName $ getCols $ tiFieldInfoMap ti
    pgCols = map pgiName $ getCols $ _tiFieldInfoMap ti

    tn = tiName ti
    tn = _tiName ti
    i = InsPermInfo (HS.fromList pgCols) tn annBoolExpTrue M.empty []
    s = SelPermInfo (HS.fromList pgCols) tn annBoolExpTrue
        Nothing True []

@ -57,14 +57,14 @@ mkAdminRolePermInfo ti =
askPermInfo'
  :: (UserInfoM m)
  => PermAccessor c
  -> TableInfo
  -> TableInfo PGColInfo
  -> m (Maybe c)
askPermInfo' pa tableInfo = do
  roleName <- askCurRole
  let mrpi = getRolePermInfo roleName
  return $ mrpi >>= (^. permAccToLens pa)
  where
    rpim = tiRolePermInfoMap tableInfo
    rpim = _tiRolePermInfoMap tableInfo
    getRolePermInfo roleName
      | roleName == adminRole = Just $ mkAdminRolePermInfo tableInfo
      | otherwise = M.lookup roleName rpim

@ -72,7 +72,7 @@ askPermInfo' pa tableInfo = do
askPermInfo
  :: (UserInfoM m, QErrM m)
  => PermAccessor c
  -> TableInfo
  -> TableInfo PGColInfo
  -> m c
askPermInfo pa tableInfo = do
  roleName <- askCurRole

@ -80,38 +80,38 @@ askPermInfo pa tableInfo = do
  case mPermInfo of
    Just c -> return c
    Nothing -> throw400 PermissionDenied $ mconcat
      [ pt <> " on " <>> tiName tableInfo
      [ pt <> " on " <>> _tiName tableInfo
      , " for role " <>> roleName
      , " is not allowed. "
      ]
  where
    pt = permTypeToCode $ permAccToType pa

isTabUpdatable :: RoleName -> TableInfo -> Bool
isTabUpdatable :: RoleName -> TableInfo PGColInfo -> Bool
isTabUpdatable role ti
  | role == adminRole = True
  | otherwise = isJust $ M.lookup role rpim >>= _permUpd
  where
    rpim = tiRolePermInfoMap ti
    rpim = _tiRolePermInfoMap ti

askInsPermInfo
  :: (UserInfoM m, QErrM m)
  => TableInfo -> m InsPermInfo
  => TableInfo PGColInfo -> m InsPermInfo
askInsPermInfo = askPermInfo PAInsert

askSelPermInfo
  :: (UserInfoM m, QErrM m)
  => TableInfo -> m SelPermInfo
  => TableInfo PGColInfo -> m SelPermInfo
askSelPermInfo = askPermInfo PASelect

askUpdPermInfo
  :: (UserInfoM m, QErrM m)
  => TableInfo -> m UpdPermInfo
  => TableInfo PGColInfo -> m UpdPermInfo
askUpdPermInfo = askPermInfo PAUpdate

askDelPermInfo
  :: (UserInfoM m, QErrM m)
  => TableInfo -> m DelPermInfo
  => TableInfo PGColInfo -> m DelPermInfo
askDelPermInfo = askPermInfo PADelete

verifyAsrns :: (MonadError QErr m) => [a -> m ()] -> [a] -> m ()

@ -142,27 +142,27 @@ checkPermOnCol pt allowedCols pgCol = do
  ]

binRHSBuilder
  :: PGScalarType -> Value -> DMLP1 S.SQLExp
  :: PGColumnType -> Value -> DMLP1 S.SQLExp
binRHSBuilder colType val = do
  preparedArgs <- get
  binVal <- runAesonParser (convToBin colType) val
  put (preparedArgs DS.|> binVal)
  return $ toPrepParam (DS.length preparedArgs + 1) colType
  scalarValue <- parsePGScalarValue colType val
  put (preparedArgs DS.|> toBinaryValue scalarValue)
  return $ toPrepParam (DS.length preparedArgs + 1) (pstType scalarValue)
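
binRHSBuilder now resolves the logical column type to a scalar before encoding, but the positional-parameter bookkeeping is unchanged: each prepared value is appended to a sequence and referenced as $1, $2, and so on. A minimal State-based sketch of that pattern (simplified types, not the real DMLP1 monad):

import Control.Monad.State (State, get, put)
import qualified Data.Sequence as DS

-- append an encoded value to the prepared-argument sequence and return
-- its positional placeholder
addPrepArg :: val -> State (DS.Seq val) String
addPrepArg v = do
  args <- get
  put (args DS.|> v)
  pure ('$' : show (DS.length args + 1))
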
fetchRelTabInfo
  :: (QErrM m, CacheRM m)
  => QualifiedTable
  -> m TableInfo
  -> m (TableInfo PGColInfo)
fetchRelTabInfo refTabName =
  -- Internal error
  modifyErrAndSet500 ("foreign " <> ) $ askTabInfo refTabName

type SessVarBldr m = PgType -> SessVar -> m S.SQLExp
type SessVarBldr m = PGType PGScalarType -> SessVar -> m S.SQLExp

fetchRelDet
  :: (UserInfoM m, QErrM m, CacheRM m)
  => RelName -> QualifiedTable
  -> m (FieldInfoMap, SelPermInfo)
  -> m (FieldInfoMap PGColInfo, SelPermInfo)
fetchRelDet relName refTabName = do
  roleName <- askCurRole
  -- Internal error

@ -171,7 +171,7 @@ fetchRelDet relName refTabName = do
  refSelPerm <- modifyErr (relPermErr refTabName roleName) $
    askSelPermInfo refTabInfo

  return (tiFieldInfoMap refTabInfo, refSelPerm)
  return (_tiFieldInfoMap refTabInfo, refSelPerm)
  where
    relPermErr rTable roleName _ =
      mconcat

@ -215,16 +215,16 @@ convPartialSQLExp f = \case
  PSESessVar colTy sessVar -> f colTy sessVar

sessVarFromCurrentSetting
  :: (Applicative f) => PgType -> SessVar -> f S.SQLExp
  :: (Applicative f) => PGType PGScalarType -> SessVar -> f S.SQLExp
sessVarFromCurrentSetting pgType sessVar =
  pure $ sessVarFromCurrentSetting' pgType sessVar

sessVarFromCurrentSetting' :: PgType -> SessVar -> S.SQLExp
sessVarFromCurrentSetting' :: PGType PGScalarType -> SessVar -> S.SQLExp
sessVarFromCurrentSetting' ty sessVar =
  flip S.SETyAnn (S.mkTypeAnn ty) $
    case ty of
      PgTypeSimple baseTy -> withGeoVal baseTy sessVarVal
      PgTypeArray _ -> sessVarVal
      PGTypeSimple baseTy -> withGeoVal baseTy sessVarVal
      PGTypeArray _ -> sessVarVal
  where
    curSess = S.SEUnsafe "current_setting('hasura.user')::json"
    sessVarVal = S.SEOpApp (S.SQLOp "->>")
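
For orientation, the expression assembled above reads the session JSON from a Postgres setting, extracts the requested key with ->>, and casts the result to the column's type. A string-level sketch of the SQL it corresponds to; the real code builds an S.SQLExp AST, so this is illustrative only:

-- e.g. sessVarSQL "text" "x-hasura-user-id"
--   == "(current_setting('hasura.user')::json ->> 'x-hasura-user-id')::text"
sessVarSQL :: String -> String -> String
sessVarSQL ty var =
  "(current_setting('hasura.user')::json ->> '" ++ var ++ "')::" ++ ty
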
@ -241,23 +241,25 @@ checkSelPerm spi sessVarBldr =

convBoolExp
  :: (UserInfoM m, QErrM m, CacheRM m)
  => FieldInfoMap
  => FieldInfoMap PGColInfo
  -> SelPermInfo
  -> BoolExp
  -> SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> m AnnBoolExpSQL
convBoolExp cim spi be sessVarBldr prepValBldr = do
  abe <- annBoolExp rhsParser cim be
  checkSelPerm spi sessVarBldr abe
  where
    rhsParser pgType val = case pgType of
      PgTypeSimple ty -> prepValBldr ty val
      PgTypeArray ofTy -> do
        -- for arrays we don't use the prepared builder
      PGTypeSimple ty -> prepValBldr ty val
      PGTypeArray ofTy -> do
        -- for arrays, we don't use the prepared builder
        vals <- runAesonParser parseJSON val
        arrayExp <- S.SEArray <$> indexedForM vals (txtRHSBuilder ofTy)
        return $ S.SETyAnn arrayExp $ S.mkTypeAnn pgType
        PGScalarTyped scalarType scalarValues <- parsePGScalarValues ofTy vals
        return $ S.SETyAnn
          (S.SEArray $ map (toTxtValue . PGScalarTyped scalarType) scalarValues)
          (S.mkTypeAnn $ PGTypeArray scalarType)

dmlTxErrorHandler :: Q.PGTxErr -> QErr
dmlTxErrorHandler p2Res =

@ -266,16 +268,16 @@ dmlTxErrorHandler p2Res =
  Just (code, msg) -> err400 code msg
  where err = simplifyError p2Res

toJSONableExp :: Bool -> PGScalarType -> S.SQLExp -> S.SQLExp
toJSONableExp :: Bool -> PGColumnType -> S.SQLExp -> S.SQLExp
toJSONableExp strfyNum colTy expn
  | colTy == PGGeometry || colTy == PGGeography =
  | isScalarColumnWhere isGeoType colTy =
    S.SEFnApp "ST_AsGeoJSON"
      [ expn
      , S.SEUnsafe "15" -- max decimal digits
      , S.SEUnsafe "4" -- to print out crs
      ] Nothing
      `S.SETyAnn` S.jsonTypeAnn
  | isBigNum colTy && strfyNum =
  | isScalarColumnWhere isBigNum colTy && strfyNum =
    expn `S.SETyAnn` S.textTypeAnn
  | otherwise = expn
@ -108,7 +108,7 @@ mkSelCTEFromColVals qt allCols colVals =
      let pgCol = pgiName ci
      val <- onNothing (Map.lookup pgCol colVal) $
        throw500 $ "column " <> pgCol <<> " not found in returning values"
      runAesonParser (convToTxt (pgiType ci)) val
      toTxtValue <$> parsePGScalarValue (pgiType ci) val

    selNoRows =
      S.mkSelect { S.selExtr = [S.selectStar]

@ -50,7 +50,7 @@ hasNestedFld = any isNestedMutFld
    FArr _ -> True
    _ -> False

pgColsFromMutFld :: MutFld -> [(PGCol, PGScalarType)]
pgColsFromMutFld :: MutFld -> [(PGCol, PGColumnType)]
pgColsFromMutFld = \case
  MCount -> []
  MExp _ -> []

@ -59,7 +59,7 @@ pgColsFromMutFld = \case
    FCol (PGColInfo col colTy _) _ -> Just (col, colTy)
    _ -> Nothing

pgColsFromMutFlds :: MutFlds -> [(PGCol, PGScalarType)]
pgColsFromMutFlds :: MutFlds -> [(PGCol, PGColumnType)]
pgColsFromMutFlds = concatMap (pgColsFromMutFld . snd)

pgColsToSelFlds :: [PGColInfo] -> [(FieldName, AnnFld)]

@ -111,7 +111,7 @@ mkSelWith qt cte mutFlds singleObj strfyNum =

checkRetCols
  :: (UserInfoM m, QErrM m)
  => FieldInfoMap
  => FieldInfoMap PGColInfo
  -> SelPermInfo
  -> [PGCol]
  -> m [PGColInfo]

@ -30,7 +30,7 @@ import qualified Database.PG.Query as Q
import qualified Hasura.SQL.DML as S

convSelCol :: (UserInfoM m, QErrM m, CacheRM m)
           => FieldInfoMap
           => FieldInfoMap PGColInfo
           -> SelPermInfo
           -> SelCol
           -> m [ExtCol]

@ -50,7 +50,7 @@ convSelCol fieldInfoMap spi (SCStar wildcard) =

convWildcard
  :: (UserInfoM m, QErrM m, CacheRM m)
  => FieldInfoMap
  => FieldInfoMap PGColInfo
  -> SelPermInfo
  -> Wildcard
  -> m [ExtCol]

@ -71,14 +71,14 @@ convWildcard fieldInfoMap (SelPermInfo cols _ _ _ _ _) wildcard =
      mRelSelPerm <- askPermInfo' PASelect relTabInfo

      forM mRelSelPerm $ \rspi -> do
        rExtCols <- convWildcard (tiFieldInfoMap relTabInfo) rspi wc
        rExtCols <- convWildcard (_tiFieldInfoMap relTabInfo) rspi wc
        return $ ECRel relName Nothing $
          SelectG rExtCols Nothing Nothing Nothing Nothing

    relExtCols wc = mapM (mkRelCol wc) relColInfos

resolveStar :: (UserInfoM m, QErrM m, CacheRM m)
            => FieldInfoMap
            => FieldInfoMap PGColInfo
            -> SelPermInfo
            -> SelectQ
            -> m SelectQExt

@ -105,7 +105,7 @@ resolveStar fim spi (SelectG selCols mWh mOb mLt mOf) = do
convOrderByElem
  :: (UserInfoM m, QErrM m, CacheRM m)
  => SessVarBldr m
  -> (FieldInfoMap, SelPermInfo)
  -> (FieldInfoMap PGColInfo, SelPermInfo)
  -> OrderByCol
  -> m AnnObCol
convOrderByElem sessVarBldr (flds, spi) = \case

@ -115,7 +115,7 @@ convOrderByElem sessVarBldr (flds, spi) = \case
    FIColumn colInfo -> do
      checkSelOnCol spi (pgiName colInfo)
      let ty = pgiType colInfo
      if ty == PGGeography || ty == PGGeometry
      if isScalarColumnWhere isGeoType ty
        then throw400 UnexpectedPayload $ mconcat
          [ fldName <<> " has type 'geometry'"
          , " and cannot be used in order_by"

@ -145,11 +145,11 @@ convOrderByElem sessVarBldr (flds, spi) = \case

convSelectQ
  :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
  => FieldInfoMap -- Table information of current table
  => FieldInfoMap PGColInfo -- Table information of current table
  -> SelPermInfo -- Additional select permission info
  -> SelectQExt -- Given Select Query
  -> SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> m AnnSimpleSel
convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do

@ -200,7 +200,7 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do

convExtSimple
  :: (UserInfoM m, QErrM m)
  => FieldInfoMap
  => FieldInfoMap PGColInfo
  -> SelPermInfo
  -> PGCol
  -> m PGColInfo

@ -212,12 +212,12 @@ convExtSimple fieldInfoMap selPermInfo pgCol = do

convExtRel
  :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
  => FieldInfoMap
  => FieldInfoMap PGColInfo
  -> RelName
  -> Maybe RelName
  -> SelectQExt
  -> SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> m (Either ObjSel ArrSel)
convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
  -- Point to the name key

@ -250,15 +250,15 @@ convExtRel fieldInfoMap relName mAlias selQ sessVarBldr prepValBldr = do
convSelectQuery
  :: (UserInfoM m, QErrM m, CacheRM m, HasSQLGenCtx m)
  => SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> SelectQuery
  -> m AnnSimpleSel
convSelectQuery sessVarBldr prepArgBuilder (DMLQuery qt selQ) = do
  tabInfo <- withPathK "table" $ askTabInfo qt
  selPermInfo <- askSelPermInfo tabInfo
  extSelQ <- resolveStar (tiFieldInfoMap tabInfo) selPermInfo selQ
  extSelQ <- resolveStar (_tiFieldInfoMap tabInfo) selPermInfo selQ
  validateHeaders $ spiRequiredHeaders selPermInfo
  convSelectQ (tiFieldInfoMap tabInfo) selPermInfo
  convSelectQ (_tiFieldInfoMap tabInfo) selPermInfo
    extSelQ sessVarBldr prepArgBuilder

mkFuncSelectSimple

@ -67,9 +67,9 @@ mkUpdateCTE (AnnUpd tn setExps (permFltr, wc) _ _) =

convInc
  :: (QErrM m)
  => (PGScalarType -> Value -> m S.SQLExp)
  => (PGColumnType -> Value -> m S.SQLExp)
  -> PGCol
  -> PGScalarType
  -> PGColumnType
  -> Value
  -> m (PGCol, S.SQLExp)
convInc f col colType val = do

@ -78,9 +78,9 @@ convInc f col colType val = do

convMul
  :: (QErrM m)
  => (PGScalarType -> Value -> m S.SQLExp)
  => (PGColumnType -> Value -> m S.SQLExp)
  -> PGCol
  -> PGScalarType
  -> PGColumnType
  -> Value
  -> m (PGCol, S.SQLExp)
convMul f col colType val = do

@ -89,25 +89,25 @@ convMul f col colType val = do

convSet
  :: (QErrM m)
  => (PGScalarType -> Value -> m S.SQLExp)
  => (PGColumnType -> Value -> m S.SQLExp)
  -> PGCol
  -> PGScalarType
  -> PGColumnType
  -> Value
  -> m (PGCol, S.SQLExp)
convSet f col colType val = do
  prepExp <- f colType val
  return (col, prepExp)

convDefault :: (Monad m) => PGCol -> PGScalarType -> () -> m (PGCol, S.SQLExp)
convDefault :: (Monad m) => PGCol -> PGColumnType -> () -> m (PGCol, S.SQLExp)
convDefault col _ _ = return (col, S.SEUnsafe "DEFAULT")

convOp
  :: (UserInfoM m, QErrM m)
  => FieldInfoMap
  => FieldInfoMap PGColInfo
  -> [PGCol]
  -> UpdPermInfo
  -> [(PGCol, a)]
  -> (PGCol -> PGScalarType -> a -> m (PGCol, S.SQLExp))
  -> (PGCol -> PGColumnType -> a -> m (PGCol, S.SQLExp))
  -> m [(PGCol, S.SQLExp)]
convOp fieldInfoMap preSetCols updPerm objs conv =
  forM objs $ \(pgCol, a) -> do

@ -129,7 +129,7 @@ convOp fieldInfoMap preSetCols updPerm objs conv =
validateUpdateQueryWith
  :: (UserInfoM m, QErrM m, CacheRM m)
  => SessVarBldr m
  -> (PGScalarType -> Value -> m S.SQLExp)
  -> (PGColumnType -> Value -> m S.SQLExp)
  -> UpdateQuery
  -> m AnnUpd
validateUpdateQueryWith sessVarBldr prepValBldr uq = do

@ -138,7 +138,7 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do

  -- If it is a view then check if it is updatable
  mutableView tableName viIsUpdatable
    (tiViewInfo tableInfo) "updatable"
    (_tiViewInfo tableInfo) "updatable"

  -- Check if the role has update permissions
  updPerm <- askUpdPermInfo tableInfo

@ -150,7 +150,7 @@ validateUpdateQueryWith sessVarBldr prepValBldr uq = do
  selPerm <- modifyErr (<> selNecessaryMsg) $
    askSelPermInfo tableInfo

  let fieldInfoMap = tiFieldInfoMap tableInfo
  let fieldInfoMap = _tiFieldInfoMap tableInfo
      allCols = getCols fieldInfoMap
      preSetObj = upiSet updPerm
      preSetCols = M.keys preSetObj

@ -2,14 +2,11 @@ module Hasura.RQL.GBoolExp
  ( toSQLBoolExp
  , getBoolExpDeps
  , annBoolExp
  , txtRHSBuilder
  , pgValParser
  ) where

import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.SQL.Types
import Hasura.SQL.Value

import qualified Hasura.SQL.DML as S

@ -21,16 +18,16 @@ import qualified Data.HashMap.Strict as M
import qualified Data.Text.Extended as T

type OpRhsParser m v =
  PgType -> Value -> m v
  PGType PGColumnType -> Value -> m v

-- | Represents a reference to a Postgres column, possibly casted an arbitrary
-- number of times. Used within 'parseOperationsExpression' for bookkeeping.
data ColumnReference
  = ColumnReferenceColumn !PGColInfo
  | ColumnReferenceCast !ColumnReference !PGScalarType
  | ColumnReferenceCast !ColumnReference !PGColumnType
  deriving (Show, Eq)

columnReferenceType :: ColumnReference -> PGScalarType
columnReferenceType :: ColumnReference -> PGColumnType
columnReferenceType = \case
  ColumnReferenceColumn column -> pgiType column
  ColumnReferenceCast _ targetType -> targetType

@ -46,7 +43,7 @@ parseOperationsExpression
  :: forall m v
   . (MonadError QErr m)
  => OpRhsParser m v
  -> FieldInfoMap
  -> FieldInfoMap PGColInfo
  -> PGColInfo
  -> Value
  -> m [OpExpG v]

@ -59,7 +56,7 @@ parseOperationsExpression rhsParser fim columnInfo =
    Object o -> mapM (parseOperation column) (M.toList o)
    val -> pure . AEQ False <$> rhsParser columnType val
    where
      columnType = PgTypeSimple $ columnReferenceType column
      columnType = PGTypeSimple $ columnReferenceType column

  parseOperation :: ColumnReference -> (T.Text, Value) -> m (OpExpG v)
  parseOperation column (opStr, val) = withPathK opStr $

@ -114,17 +111,17 @@ parseOperationsExpression rhsParser fim columnInfo =
      "_is_null" -> parseIsNull

      -- jsonb type
      "_contains" -> jsonbOnlyOp $ AContains <$> parseOne
      "$contains" -> jsonbOnlyOp $ AContains <$> parseOne
      "_contained_in" -> jsonbOnlyOp $ AContainedIn <$> parseOne
      "$contained_in" -> jsonbOnlyOp $ AContainedIn <$> parseOne
      "_has_key" -> jsonbOnlyOp $ AHasKey <$> parseWithTy PGText
      "$has_key" -> jsonbOnlyOp $ AHasKey <$> parseWithTy PGText
      "_contains" -> guardType [PGJSONB] >> AContains <$> parseOne
      "$contains" -> guardType [PGJSONB] >> AContains <$> parseOne
      "_contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
      "$contained_in" -> guardType [PGJSONB] >> AContainedIn <$> parseOne
      "_has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)
      "$has_key" -> guardType [PGJSONB] >> AHasKey <$> parseWithTy (PGColumnScalar PGText)

      "_has_keys_any" -> jsonbOnlyOp $ AHasKeysAny <$> parseManyWithType PGText
      "$has_keys_any" -> jsonbOnlyOp $ AHasKeysAny <$> parseManyWithType PGText
      "_has_keys_all" -> jsonbOnlyOp $ AHasKeysAll <$> parseManyWithType PGText
      "$has_keys_all" -> jsonbOnlyOp $ AHasKeysAll <$> parseManyWithType PGText
      "_has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
      "$has_keys_any" -> guardType [PGJSONB] >> AHasKeysAny <$> parseManyWithType (PGColumnScalar PGText)
      "_has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)
      "$has_keys_all" -> guardType [PGJSONB] >> AHasKeysAll <$> parseManyWithType (PGColumnScalar PGText)

      -- geometry types
      "_st_contains" -> parseGeometryOp ASTContains

@ -177,12 +174,12 @@ parseOperationsExpression rhsParser fim columnInfo =
      parseLt = ALT <$> parseOne -- <
      parseGte = AGTE <$> parseOne -- >=
      parseLte = ALTE <$> parseOne -- <=
      parseLike = textOnlyOp colTy >> ALIKE <$> parseOne
      parseNlike = textOnlyOp colTy >> ANLIKE <$> parseOne
      parseIlike = textOnlyOp colTy >> AILIKE <$> parseOne
      parseNilike = textOnlyOp colTy >> ANILIKE <$> parseOne
      parseSimilar = textOnlyOp colTy >> ASIMILAR <$> parseOne
      parseNsimilar = textOnlyOp colTy >> ANSIMILAR <$> parseOne
      parseLike = guardType stringTypes >> ALIKE <$> parseOne
      parseNlike = guardType stringTypes >> ANLIKE <$> parseOne
      parseIlike = guardType stringTypes >> AILIKE <$> parseOne
      parseNilike = guardType stringTypes >> ANILIKE <$> parseOne
      parseSimilar = guardType stringTypes >> ASIMILAR <$> parseOne
      parseNsimilar = guardType stringTypes >> ANSIMILAR <$> parseOne

      parseIsNull = bool ANISNOTNULL ANISNULL -- is null
        <$> parseVal

@ -199,7 +196,7 @@ parseOperationsExpression rhsParser fim columnInfo =
      parsedCastOperations <-
        forM (M.toList castOperations) $ \(targetTypeName, castedComparisons) -> do
          let targetType = txtToPgColTy targetTypeName
              castedColumn = ColumnReferenceCast column targetType
              castedColumn = ColumnReferenceCast column (PGColumnScalar targetType)
          checkValidCast targetType
          parsedCastedComparisons <- withPathK targetTypeName $
            parseOperations castedColumn castedComparisons

@ -207,31 +204,27 @@ parseOperationsExpression rhsParser fim columnInfo =
      return . ACast $ M.fromList parsedCastOperations

      checkValidCast targetType = case (colTy, targetType) of
        (PGGeometry, PGGeography) -> return ()
        (PGGeography, PGGeometry) -> return ()
        (PGColumnScalar PGGeometry, PGGeography) -> return ()
        (PGColumnScalar PGGeography, PGGeometry) -> return ()
        _ -> throw400 UnexpectedPayload $
          "cannot cast column of type " <> colTy <<> " to type " <>> targetType

      jsonbOnlyOp m = case colTy of
        PGJSONB -> m
        ty -> throwError $ buildMsg ty [PGJSONB]

      parseGeometryOp f =
        geometryOp colTy >> f <$> parseOneNoSess colTy val
        guardType [PGGeometry] >> f <$> parseOneNoSess colTy val
      parseGeometryOrGeographyOp f =
        geometryOrGeographyOp colTy >> f <$> parseOneNoSess colTy val
        guardType geoTypes >> f <$> parseOneNoSess colTy val

      parseSTDWithinObj = case colTy of
        PGGeometry -> do
        PGColumnScalar PGGeometry -> do
          DWithinGeomOp distVal fromVal <- parseVal
          dist <- withPathK "distance" $ parseOneNoSess PGFloat distVal
          dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
          from <- withPathK "from" $ parseOneNoSess colTy fromVal
          return $ ASTDWithinGeom $ DWithinGeomOp dist from
        PGGeography -> do
        PGColumnScalar PGGeography -> do
          DWithinGeogOp distVal fromVal sphVal <- parseVal
          dist <- withPathK "distance" $ parseOneNoSess PGFloat distVal
          dist <- withPathK "distance" $ parseOneNoSess (PGColumnScalar PGFloat) distVal
          from <- withPathK "from" $ parseOneNoSess colTy fromVal
          useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess PGBoolean sphVal
          useSpheroid <- withPathK "use_spheroid" $ parseOneNoSess (PGColumnScalar PGBoolean) sphVal
          return $ ASTDWithinGeog $ DWithinGeogOp dist from useSpheroid
        _ -> throwError $ buildMsg colTy [PGGeometry, PGGeography]

@ -246,36 +239,23 @@ parseOperationsExpression rhsParser fim columnInfo =
          "incompatible column types : " <> column <<> ", " <>> rhsCol
        else return rhsCol

      geometryOp PGGeometry = return ()
      geometryOp ty =
        throwError $ buildMsg ty [PGGeometry]
      geometryOrGeographyOp PGGeometry = return ()
      geometryOrGeographyOp PGGeography = return ()
      geometryOrGeographyOp ty =
        throwError $ buildMsg ty [PGGeometry, PGGeography]

      parseWithTy ty = rhsParser (PgTypeSimple ty) val
      parseWithTy ty = rhsParser (PGTypeSimple ty) val

      -- parse one with the column's type
      parseOne = parseWithTy colTy
      parseOneNoSess ty = rhsParser (PgTypeSimple ty)
      parseOneNoSess ty = rhsParser (PGTypeSimple ty)

      parseManyWithType ty = rhsParser (PgTypeArray ty) val
      parseManyWithType ty = rhsParser (PGTypeArray ty) val

      guardType validTys = unless (isScalarColumnWhere (`elem` validTys) colTy) $
        throwError $ buildMsg colTy validTys
      buildMsg ty expTys = err400 UnexpectedPayload
        $ " is of type " <> ty <<> "; this operator works only on columns of type "
        <> T.intercalate "/" (map dquote expTys)

      parseVal :: (FromJSON a) => m a
      parseVal = decodeValue val

buildMsg :: PGScalarType -> [PGScalarType] -> QErr
buildMsg ty expTys = err400 UnexpectedPayload
  $ " is of type " <> ty <<> "; this operator works only on columns of type "
  <> T.intercalate "/" (map dquote expTys)

textOnlyOp :: (MonadError QErr m) => PGScalarType -> m ()
textOnlyOp PGText = return ()
textOnlyOp PGVarchar = return ()
textOnlyOp ty =
  throwError $ buildMsg ty [PGVarchar, PGText]

-- This convoluted expression instead of col = val
-- to handle the case of col : null
equalsBoolExpBuilder :: S.SQLExp -> S.SQLExp -> S.BoolExp

@ -295,7 +275,7 @@ notEqualsBoolExpBuilder qualColExp rhsExp =
annBoolExp
  :: (QErrM m, CacheRM m)
  => OpRhsParser m v
  -> FieldInfoMap
  -> FieldInfoMap PGColInfo
  -> BoolExp
  -> m (AnnBoolExp v)
annBoolExp rhsParser fim (BoolExp boolExp) =

@ -304,13 +284,13 @@ annBoolExp rhsParser fim (BoolExp boolExp) =
annColExp
  :: (QErrM m, CacheRM m)
  => OpRhsParser m v
  -> FieldInfoMap
  -> FieldInfoMap PGColInfo
  -> ColExp
  -> m (AnnBoolExpFld v)
annColExp rhsParser colInfoMap (ColExp fieldName colVal) = do
  colInfo <- askFieldInfo colInfoMap fieldName
  case colInfo of
    FIColumn (PGColInfo _ PGJSON _) ->
    FIColumn (PGColInfo _ (PGColumnScalar PGJSON) _) ->
      throwError (err400 UnexpectedPayload "JSON column can not be part of where clause")
    FIColumn pgi ->
      AVCol pgi <$> parseOperationsExpression rhsParser colInfoMap pgi colVal

@ -355,18 +335,6 @@ convColRhs tableQual = \case
  where
    mkQCol q = S.SEQIden . S.QIden q . toIden

pgValParser
  :: (MonadError QErr m)
  => PGScalarType -> Value -> m PGColValue
pgValParser ty =
  runAesonParser (parsePGValue ty)

txtRHSBuilder
  :: (MonadError QErr m)
  => PGScalarType -> Value -> m S.SQLExp
txtRHSBuilder ty val =
  toTxtValue ty <$> pgValParser ty val

mkColCompExp
  :: S.Qual -> PGCol -> OpExpG S.SQLExp -> S.BoolExp
mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol)

@ -428,7 +396,7 @@ mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol)

  mkCastsExp casts =
    sqlAll . flip map (M.toList casts) $ \(targetType, operations) ->
      let targetAnn = S.mkTypeAnn $ PgTypeSimple targetType
      let targetAnn = S.mkTypeAnn $ PGTypeSimple targetType
      in sqlAll $ map (mkCompExp (S.SETyAnn lhs targetAnn)) operations

  sqlAll = foldr (S.BEBin S.AndOp) (S.BELit True)

@ -41,6 +41,7 @@ import Hasura.Db as R
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.Types.BoolExp as R
import Hasura.RQL.Types.Column as R
import Hasura.RQL.Types.Common as R
import Hasura.RQL.Types.DML as R
import Hasura.RQL.Types.Error as R

@ -60,9 +61,9 @@ import qualified Network.HTTP.Client as HTTP

getFieldInfoMap
  :: QualifiedTable
  -> SchemaCache -> Maybe FieldInfoMap
  -> SchemaCache -> Maybe (FieldInfoMap PGColInfo)
getFieldInfoMap tn =
  fmap tiFieldInfoMap . M.lookup tn . scTables
  fmap _tiFieldInfoMap . M.lookup tn . scTables

data QCtx
  = QCtx

@ -85,7 +86,7 @@ class (Monad m) => UserInfoM m where

askTabInfo
  :: (QErrM m, CacheRM m)
  => QualifiedTable -> m TableInfo
  => QualifiedTable -> m (TableInfo PGColInfo)
askTabInfo tabName = do
  rawSchemaCache <- askSchemaCache
  liftMaybe (err400 NotExists errMsg) $ M.lookup tabName $ scTables rawSchemaCache

@ -94,11 +95,11 @@ askTabInfo tabName = do

askTabInfoFromTrigger
  :: (QErrM m, CacheRM m)
  => TriggerName -> m TableInfo
  => TriggerName -> m (TableInfo PGColInfo)
askTabInfoFromTrigger trn = do
  sc <- askSchemaCache
  let tabInfos = M.elems $ scTables sc
  liftMaybe (err400 NotExists errMsg) $ find (isJust.M.lookup trn.tiEventTriggerInfoMap) tabInfos
  liftMaybe (err400 NotExists errMsg) $ find (isJust.M.lookup trn._tiEventTriggerInfoMap) tabInfos
  where
    errMsg = "event trigger " <> triggerNameToTxt trn <<> " does not exist"

@ -107,7 +108,7 @@ askEventTriggerInfo
  => TriggerName -> m EventTriggerInfo
askEventTriggerInfo trn = do
  ti <- askTabInfoFromTrigger trn
  let etim = tiEventTriggerInfoMap ti
  let etim = _tiEventTriggerInfoMap ti
  liftMaybe (err400 NotExists errMsg) $ M.lookup trn etim
  where
    errMsg = "event trigger " <> triggerNameToTxt trn <<> " does not exist"

@ -164,7 +165,7 @@ liftP1WithQCtx r m =

askFieldInfoMap
  :: (QErrM m, CacheRM m)
  => QualifiedTable -> m FieldInfoMap
  => QualifiedTable -> m (FieldInfoMap PGColInfo)
askFieldInfoMap tabName = do
  mFieldInfoMap <- getFieldInfoMap tabName <$> askSchemaCache
  maybe (throw400 NotExists errMsg) return mFieldInfoMap

@ -173,19 +174,19 @@ askFieldInfoMap tabName = do

askPGType
||||
:: (MonadError QErr m)
|
||||
=> FieldInfoMap
|
||||
=> FieldInfoMap PGColInfo
|
||||
-> PGCol
|
||||
-> T.Text
|
||||
-> m PGScalarType
|
||||
-> m PGColumnType
|
||||
askPGType m c msg =
|
||||
pgiType <$> askPGColInfo m c msg
|
||||
|
||||
askPGColInfo
|
||||
:: (MonadError QErr m)
|
||||
=> FieldInfoMap
|
||||
=> FieldInfoMap columnInfo
|
||||
-> PGCol
|
||||
-> T.Text
|
||||
-> m PGColInfo
|
||||
-> m columnInfo
|
||||
askPGColInfo m c msg = do
|
||||
colInfo <- modifyErr ("column " <>) $
|
||||
askFieldInfo m (fromPGCol c)
|
||||
@ -200,16 +201,16 @@ askPGColInfo m c msg = do
|
||||
]
|
||||
|
||||
assertPGCol :: (MonadError QErr m)
|
||||
=> FieldInfoMap
|
||||
=> FieldInfoMap columnInfo
|
||||
-> T.Text
|
||||
-> PGCol
|
||||
-> m ()
|
||||
assertPGCol m msg c = do
|
||||
_ <- askPGType m c msg
|
||||
_ <- askPGColInfo m c msg
|
||||
return ()
|
||||
|
||||
askRelType :: (MonadError QErr m)
|
||||
=> FieldInfoMap
|
||||
=> FieldInfoMap columnInfo
|
||||
-> RelName
|
||||
-> T.Text
|
||||
-> m RelInfo
|
||||
@ -226,9 +227,9 @@ askRelType m r msg = do
|
||||
]
|
||||
|
||||
askFieldInfo :: (MonadError QErr m)
|
||||
=> FieldInfoMap
|
||||
=> FieldInfoMap columnInfo
|
||||
-> FieldName
|
||||
-> m FieldInfo
|
||||
-> m (FieldInfo columnInfo)
|
||||
askFieldInfo m f =
|
||||
case M.lookup f m of
|
||||
Just colInfo -> return colInfo
|
||||
|
@ -20,6 +20,7 @@ module Hasura.RQL.Types.BoolExp
|
||||
, AnnBoolExpFldSQL
|
||||
, AnnBoolExpSQL
|
||||
, PartialSQLExp(..)
|
||||
, mkScalarSessionVar
|
||||
, isStaticValue
|
||||
, AnnBoolExpFldPartialSQL
|
||||
, AnnBoolExpPartialSQL
|
||||
@ -30,6 +31,7 @@ module Hasura.RQL.Types.BoolExp
|
||||
) where
|
||||
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.Types.Column
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.Permission
|
||||
import qualified Hasura.SQL.DML as S
|
||||
@ -280,10 +282,14 @@ type PreSetCols = M.HashMap PGCol S.SQLExp
|
||||
|
||||
-- doesn't resolve the session variable
|
||||
data PartialSQLExp
|
||||
= PSESessVar !PgType !SessVar
|
||||
= PSESessVar !(PGType PGScalarType) !SessVar
|
||||
| PSESQLExp !S.SQLExp
|
||||
deriving (Show, Eq, Data)
|
||||
|
||||
mkScalarSessionVar :: PGType PGColumnType -> SessVar -> PartialSQLExp
|
||||
mkScalarSessionVar columnType =
|
||||
PSESessVar (unsafePGColumnToRepresentation <$> columnType)
|
||||
|
||||
instance ToJSON PartialSQLExp where
|
||||
toJSON = \case
|
||||
PSESessVar colTy sessVar -> toJSON (colTy, sessVar)
|
||||
|
@ -1,8 +1,30 @@
|
||||
module Hasura.RQL.Types.Catalog where
|
||||
-- | This module provides 'fetchCatalogData', which loads the entire catalog in one go from the
|
||||
-- database, consulting tables such as @hdb_catalog.hdb_table@. It is used by
|
||||
-- 'Hasura.RQL.Schema.Table.buildSchemaCache' to seed or reload the schema cache.
|
||||
module Hasura.RQL.Types.Catalog
|
||||
( fetchCatalogData
|
||||
, CatalogMetadata(..)
|
||||
|
||||
, CatalogTable(..)
|
||||
, CatalogTableInfo(..)
|
||||
|
||||
, CatalogRelation(..)
|
||||
, CatalogPermission(..)
|
||||
, CatalogEventTrigger(..)
|
||||
, CatalogFunction(..)
|
||||
) where
|
||||
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Database.PG.Query as Q
|
||||
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.TH
|
||||
|
||||
import Hasura.Db
|
||||
import Hasura.RQL.DDL.Schema.Function
|
||||
import Hasura.RQL.Types.Column
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.EventTrigger
|
||||
import Hasura.RQL.Types.Permission
|
||||
@ -11,15 +33,21 @@ import Hasura.RQL.Types.RemoteSchema
|
||||
import Hasura.RQL.Types.SchemaCache
|
||||
import Hasura.SQL.Types
|
||||
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.TH
|
||||
data CatalogTableInfo
|
||||
= CatalogTableInfo
|
||||
{ _ctiColumns :: ![PGRawColInfo]
|
||||
, _ctiConstraints :: ![ConstraintName]
|
||||
, _ctiPrimaryKeyColumns :: ![PGCol]
|
||||
, _ctiViewInfo :: !(Maybe ViewInfo)
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogTableInfo)
|
||||
|
||||
data CatalogTable
|
||||
= CatalogTable
|
||||
{ _ctTable :: !QualifiedTable
|
||||
, _ctSystemDefined :: !Bool
|
||||
, _ctInfo :: !(Maybe TableInfo)
|
||||
{ _ctName :: !QualifiedTable
|
||||
, _ctIsSystemDefined :: !Bool
|
||||
, _ctIsEnum :: !Bool
|
||||
, _ctInfo :: !(Maybe CatalogTableInfo)
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogTable)
|
||||
|
||||
@ -70,3 +98,9 @@ data CatalogMetadata
|
||||
, _cmAllowlistCollections :: ![CollectionDef]
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogMetadata)
|
||||
|
||||
-- | See "Hasura.RQL.Types.Catalog".
|
||||
fetchCatalogData :: (MonadTx m) => m CatalogMetadata
|
||||
fetchCatalogData =
|
||||
liftTx $ Q.getAltJ . runIdentity . Q.getRow <$> Q.withQE defaultTxErrorHandler
|
||||
$(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True
|
||||
|
156 server/src-lib/Hasura/RQL/Types/Column.hs Normal file
@ -0,0 +1,156 @@
module Hasura.RQL.Types.Column
  ( PGColumnType(..)
  , _PGColumnScalar
  , _PGColumnEnumReference
  , isScalarColumnWhere

  , parsePGScalarValue
  , parsePGScalarValues
  , unsafePGColumnToRepresentation

  , PGColInfo(..)
  , PGRawColInfo(..)
  , onlyIntCols
  , onlyNumCols
  , onlyJSONBCols
  , onlyComparableCols
  , getColInfos

  , EnumReference(..)
  , EnumValues
  , EnumValue(..)
  , EnumValueInfo(..)
  ) where

import Hasura.Prelude

import qualified Data.HashMap.Strict as M
import qualified Data.Text as T

import Control.Lens.TH
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)

import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Error
import Hasura.SQL.Types
import Hasura.SQL.Value

newtype EnumValue
  = EnumValue { getEnumValue :: T.Text }
  deriving (Show, Eq, Lift, Hashable, ToJSON, ToJSONKey, FromJSON, FromJSONKey)

newtype EnumValueInfo
  = EnumValueInfo
  { evComment :: Maybe T.Text
  } deriving (Show, Eq, Lift, Hashable)
$(deriveJSON (aesonDrop 2 snakeCase) ''EnumValueInfo)

type EnumValues = M.HashMap EnumValue EnumValueInfo

-- | Represents a reference to an “enum table,” a single-column Postgres table that is referenced
-- via foreign key.
data EnumReference
  = EnumReference
  { erTable  :: !QualifiedTable
  , erValues :: !EnumValues
  } deriving (Show, Eq, Generic, Lift)
instance Hashable EnumReference
$(deriveJSON (aesonDrop 2 snakeCase) ''EnumReference)

-- | The type we use for columns, which are currently always “scalars” (though see the note about
-- 'PGType'). Unlike 'PGScalarType', which represents a type that /Postgres/ knows about, this type
-- characterizes distinctions we make but Postgres doesn’t.
data PGColumnType
  -- | Ordinary Postgres columns.
  = PGColumnScalar !PGScalarType
  -- | Columns that reference enum tables (see "Hasura.RQL.Schema.Enum"). This is not actually a
  -- distinct type from the perspective of Postgres (at the time of this writing, we ensure they
  -- always have type @text@), but we really want to distinguish this case, since we treat it
  -- /completely/ differently in the GraphQL schema.
  | PGColumnEnumReference !EnumReference
  deriving (Show, Eq, Generic)
instance Hashable PGColumnType
$(deriveToJSON defaultOptions{constructorTagModifier = drop 8} ''PGColumnType)
$(makePrisms ''PGColumnType)

instance DQuote PGColumnType where
  dquoteTxt = \case
    PGColumnScalar scalar -> dquoteTxt scalar
    PGColumnEnumReference (EnumReference tableName _) -> dquoteTxt tableName

isScalarColumnWhere :: (PGScalarType -> Bool) -> PGColumnType -> Bool
isScalarColumnWhere f = \case
  PGColumnScalar scalar -> f scalar
  PGColumnEnumReference _ -> False

-- | Gets the representation type associated with a 'PGColumnType'. Avoid using this if possible.
-- Prefer 'parsePGScalarValue', 'parsePGScalarValues', or
-- 'Hasura.RQL.Types.BoolExp.mkScalarSessionVar'.
unsafePGColumnToRepresentation :: PGColumnType -> PGScalarType
unsafePGColumnToRepresentation = \case
  PGColumnScalar scalarType -> scalarType
  PGColumnEnumReference _ -> PGText

parsePGScalarValue :: (MonadError QErr m) => PGColumnType -> Value -> m (PGScalarTyped PGColValue)
parsePGScalarValue columnType value = case columnType of
  PGColumnScalar scalarType ->
    PGScalarTyped scalarType <$> runAesonParser (parsePGValue scalarType) value
  PGColumnEnumReference (EnumReference tableName enumValues) -> do
    let typeName = snakeCaseQualObject tableName
    flip runAesonParser value . withText (T.unpack typeName) $ \textValue -> do
      let enumTextValues = map getEnumValue $ M.keys enumValues
      unless (textValue `elem` enumTextValues) $
        fail . T.unpack
          $ "expected one of the values " <> T.intercalate ", " (map dquote enumTextValues)
          <> " for type " <> typeName <<> ", given " <>> textValue
      pure $ PGScalarTyped PGText (PGValText textValue)

parsePGScalarValues
  :: (MonadError QErr m)
  => PGColumnType -> [Value] -> m (PGScalarTyped [PGColValue])
parsePGScalarValues columnType values = do
  scalarValues <- indexedMapM (fmap pstValue . parsePGScalarValue columnType) values
  pure $ PGScalarTyped (unsafePGColumnToRepresentation columnType) scalarValues

-- | “Raw” column info, as stored in the catalog (but not in the schema cache). Instead of
-- containing a 'PGColumnType', it only contains a 'PGScalarType', which is combined with the
-- 'prciReferences' field and other table data to eventually resolve the type to a 'PGColumnType'.
data PGRawColInfo
  = PGRawColInfo
  { prciName       :: !PGCol
  , prciType       :: !PGScalarType
  , prciIsNullable :: !Bool
  , prciReferences :: ![QualifiedTable]
  -- ^ only stores single-column references to primary key of foreign tables (used for detecting
  -- references to enum tables)
  } deriving (Show, Eq)
$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColInfo)

-- | “Resolved” column info, produced from a 'PGRawColInfo' value that has been combined with other
-- schema information to produce a 'PGColumnType'.
data PGColInfo
  = PGColInfo
  { pgiName       :: !PGCol
  , pgiType       :: !PGColumnType
  , pgiIsNullable :: !Bool
  } deriving (Show, Eq)
$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColInfo)

onlyIntCols :: [PGColInfo] -> [PGColInfo]
onlyIntCols = filter (isScalarColumnWhere isIntegerType . pgiType)

onlyNumCols :: [PGColInfo] -> [PGColInfo]
onlyNumCols = filter (isScalarColumnWhere isNumType . pgiType)

onlyJSONBCols :: [PGColInfo] -> [PGColInfo]
onlyJSONBCols = filter (isScalarColumnWhere (== PGJSONB) . pgiType)

onlyComparableCols :: [PGColInfo] -> [PGColInfo]
onlyComparableCols = filter (isScalarColumnWhere isComparableType . pgiType)

getColInfos :: [PGCol] -> [PGColInfo] -> [PGColInfo]
getColInfos cols allColInfos =
  flip filter allColInfos $ \ci -> pgiName ci `elem` cols
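To make the enum-validation logic in parsePGScalarValue above easier to follow, here is a minimal, self-contained model of it. It is a sketch, not the engine's code: it elides the Aeson parser, the QErr plumbing, and the PGScalarTyped wrapper, and the name parseEnumValue is invented for illustration.

{-# LANGUAGE OverloadedStrings #-}

import qualified Data.HashMap.Strict as M
import           Data.Text           (Text)
import qualified Data.Text           as T

-- Mirrors EnumValues above: enum value -> optional comment.
type EnumValues = M.HashMap Text (Maybe Text)

-- An enum-table column accepts only the values present in the table; on the
-- Postgres side the accepted value is still represented as text.
parseEnumValue :: EnumValues -> Text -> Either Text Text
parseEnumValue enumValues textValue
  | textValue `M.member` enumValues = Right textValue
  | otherwise = Left $
      "expected one of the values " <> T.intercalate ", " (M.keys enumValues)
      <> ", given " <> textValue

main :: IO ()
main = do
  let colors = M.fromList [("red", Nothing), ("green", Nothing), ("blue", Nothing)]
  print (parseEnumValue colors "red")      -- Right "red"
  print (parseEnumValue colors "magenta")  -- Left "expected one of the values ..."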
@ -1,6 +1,5 @@
|
||||
module Hasura.RQL.Types.Common
|
||||
( PGColInfo(..)
|
||||
, RelName(..)
|
||||
( RelName(..)
|
||||
, relNameToTxt
|
||||
, RelType(..)
|
||||
, rootRelName
|
||||
@ -38,15 +37,6 @@ import Instances.TH.Lift ()
|
||||
import Language.Haskell.TH.Syntax (Lift)
|
||||
import qualified PostgreSQL.Binary.Decoding as PD
|
||||
|
||||
data PGColInfo
|
||||
= PGColInfo
|
||||
{ pgiName :: !PGCol
|
||||
, pgiType :: !PGScalarType
|
||||
, pgiIsNullable :: !Bool
|
||||
} deriving (Show, Eq)
|
||||
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''PGColInfo)
|
||||
|
||||
newtype NonEmptyText = NonEmptyText {unNonEmptyText :: T.Text}
|
||||
deriving (Show, Eq, Ord, Hashable, ToJSON, ToJSONKey, Lift, Q.ToPrepArg, DQuote)
|
||||
|
||||
|
@ -18,7 +18,8 @@ data MetadataObjType
|
||||
| MOTEventTrigger
|
||||
| MOTFunction
|
||||
| MOTRemoteSchema
|
||||
deriving (Eq)
|
||||
deriving (Eq, Generic)
|
||||
instance Hashable MetadataObjType
|
||||
|
||||
instance Show MetadataObjType where
|
||||
show MOTTable = "table"
|
||||
@ -36,7 +37,6 @@ data TableMetadataObjId
|
||||
| MTOPerm !RoleName !PermType
|
||||
| MTOTrigger !TriggerName
|
||||
deriving (Show, Eq, Generic)
|
||||
|
||||
instance Hashable TableMetadataObjId
|
||||
|
||||
data MetadataObjId
|
||||
@ -45,7 +45,6 @@ data MetadataObjId
|
||||
| MORemoteSchema !RemoteSchemaName
|
||||
| MOTableObj !QualifiedTable !TableMetadataObjId
|
||||
deriving (Show, Eq, Generic)
|
||||
|
||||
instance Hashable MetadataObjId
|
||||
|
||||
data InconsistentMetadataObj
|
||||
@ -54,7 +53,8 @@ data InconsistentMetadataObj
|
||||
, _moType :: !MetadataObjType
|
||||
, _moDef :: !Value
|
||||
, _moReason :: !T.Text
|
||||
} deriving (Show, Eq)
|
||||
} deriving (Show, Eq, Generic)
|
||||
instance Hashable InconsistentMetadataObj
|
||||
|
||||
instance ToJSON InconsistentMetadataObj where
|
||||
toJSON (InconsistentMetadataObj _ ty info rsn) =
|
||||
|
@ -2,27 +2,36 @@
|
||||
{-# LANGUAGE RankNTypes #-}
|
||||
|
||||
module Hasura.RQL.Types.SchemaCache
|
||||
( TableCache
|
||||
, SchemaCache(..)
|
||||
( SchemaCache(..)
|
||||
, SchemaCacheVer
|
||||
, initSchemaCacheVer
|
||||
, incSchemaCacheVer
|
||||
, emptySchemaCache
|
||||
|
||||
, TableCache
|
||||
, modTableCache
|
||||
, addTableToCache
|
||||
, modTableInCache
|
||||
, delTableFromCache
|
||||
|
||||
, TableInfo(..)
|
||||
, tiName
|
||||
, tiSystemDefined
|
||||
, tiFieldInfoMap
|
||||
, tiRolePermInfoMap
|
||||
, tiUniqOrPrimConstraints
|
||||
, tiPrimaryKeyCols
|
||||
, tiViewInfo
|
||||
, tiEventTriggerInfoMap
|
||||
, tiEnumValues
|
||||
|
||||
, TableConstraint(..)
|
||||
, ConstraintType(..)
|
||||
, ViewInfo(..)
|
||||
, isMutable
|
||||
, mutableView
|
||||
, onlyIntCols
|
||||
, onlyNumCols
|
||||
, onlyJSONBCols
|
||||
, onlyComparableCols
|
||||
, isUniqueOrPrimary
|
||||
, isForeignKey
|
||||
, addTableToCache
|
||||
, modTableInCache
|
||||
, delTableFromCache
|
||||
|
||||
, RemoteSchemaCtx(..)
|
||||
, RemoteSchemaMap
|
||||
@ -36,17 +45,16 @@ module Hasura.RQL.Types.SchemaCache
|
||||
|
||||
, FieldInfoMap
|
||||
, FieldInfo(..)
|
||||
, _FIColumn
|
||||
, _FIRelationship
|
||||
, fieldInfoToEither
|
||||
, partitionFieldInfos
|
||||
, partitionFieldInfosWith
|
||||
, getCols
|
||||
, getRels
|
||||
|
||||
, PGColInfo(..)
|
||||
, isPGColInfo
|
||||
, getColInfos
|
||||
, RelInfo(..)
|
||||
-- , addFldToCache
|
||||
, addColToCache
|
||||
, addRelToCache
|
||||
|
||||
@ -107,6 +115,7 @@ import qualified Hasura.GraphQL.Context as GC
|
||||
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.Types.BoolExp
|
||||
import Hasura.RQL.Types.Column
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.Error
|
||||
import Hasura.RQL.Types.EventTrigger
|
||||
@ -137,58 +146,42 @@ mkColDep :: DependencyReason -> QualifiedTable -> PGCol -> SchemaDependency
|
||||
mkColDep reason tn col =
|
||||
flip SchemaDependency reason . SOTableObj tn $ TOCol col
|
||||
|
||||
onlyIntCols :: [PGColInfo] -> [PGColInfo]
|
||||
onlyIntCols = filter (isIntegerType . pgiType)
|
||||
|
||||
onlyNumCols :: [PGColInfo] -> [PGColInfo]
|
||||
onlyNumCols = filter (isNumType . pgiType)
|
||||
|
||||
onlyJSONBCols :: [PGColInfo] -> [PGColInfo]
|
||||
onlyJSONBCols = filter (isJSONBType . pgiType)
|
||||
|
||||
onlyComparableCols :: [PGColInfo] -> [PGColInfo]
|
||||
onlyComparableCols = filter (isComparableType . pgiType)
|
||||
|
||||
getColInfos :: [PGCol] -> [PGColInfo] -> [PGColInfo]
|
||||
getColInfos cols allColInfos =
|
||||
flip filter allColInfos $ \ci -> pgiName ci `elem` cols
|
||||
|
||||
type WithDeps a = (a, [SchemaDependency])
|
||||
|
||||
data FieldInfo
|
||||
= FIColumn !PGColInfo
|
||||
data FieldInfo columnInfo
|
||||
= FIColumn !columnInfo
|
||||
| FIRelationship !RelInfo
|
||||
deriving (Show, Eq)
|
||||
|
||||
$(deriveToJSON
|
||||
defaultOptions { constructorTagModifier = snakeCase . drop 2
|
||||
, sumEncoding = TaggedObject "type" "detail"
|
||||
}
|
||||
''FieldInfo)
|
||||
$(makePrisms ''FieldInfo)
|
||||
|
||||
fieldInfoToEither :: FieldInfo -> Either PGColInfo RelInfo
|
||||
fieldInfoToEither :: FieldInfo columnInfo -> Either columnInfo RelInfo
|
||||
fieldInfoToEither (FIColumn l) = Left l
|
||||
fieldInfoToEither (FIRelationship r) = Right r
|
||||
|
||||
partitionFieldInfos :: [FieldInfo] -> ([PGColInfo], [RelInfo])
|
||||
partitionFieldInfos :: [FieldInfo columnInfo] -> ([columnInfo], [RelInfo])
|
||||
partitionFieldInfos = partitionFieldInfosWith (id, id)
|
||||
|
||||
partitionFieldInfosWith :: (PGColInfo -> a, RelInfo -> b)
|
||||
-> [FieldInfo] -> ([a], [b])
|
||||
partitionFieldInfosWith :: (columnInfo -> a, RelInfo -> b)
|
||||
-> [FieldInfo columnInfo] -> ([a], [b])
|
||||
partitionFieldInfosWith fns =
|
||||
partitionEithers . map (biMapEither fns . fieldInfoToEither)
|
||||
where
|
||||
biMapEither (f1, f2) = either (Left . f1) (Right . f2)
|
||||
|
||||
type FieldInfoMap = M.HashMap FieldName FieldInfo
|
||||
type FieldInfoMap columnInfo = M.HashMap FieldName (FieldInfo columnInfo)
|
||||
|
||||
getCols :: FieldInfoMap -> [PGColInfo]
|
||||
getCols :: FieldInfoMap columnInfo -> [columnInfo]
|
||||
getCols fim = lefts $ map fieldInfoToEither $ M.elems fim
|
||||
|
||||
getRels :: FieldInfoMap -> [RelInfo]
|
||||
getRels :: FieldInfoMap columnInfo -> [RelInfo]
|
||||
getRels fim = rights $ map fieldInfoToEither $ M.elems fim
|
||||
|
||||
isPGColInfo :: FieldInfo -> Bool
|
||||
isPGColInfo :: FieldInfo columnInfo -> Bool
|
||||
isPGColInfo (FIColumn _) = True
|
||||
isPGColInfo _ = False
|
||||
|
||||
@ -331,32 +324,20 @@ mutableView qt f mVI operation =
|
||||
unless (isMutable f mVI) $ throw400 NotSupported $
|
||||
"view " <> qt <<> " is not " <> operation
|
||||
|
||||
data TableInfo
|
||||
data TableInfo columnInfo
|
||||
= TableInfo
|
||||
{ tiName :: !QualifiedTable
|
||||
, tiSystemDefined :: !Bool
|
||||
, tiFieldInfoMap :: !FieldInfoMap
|
||||
, tiRolePermInfoMap :: !RolePermInfoMap
|
||||
, tiUniqOrPrimConstraints :: ![ConstraintName]
|
||||
, tiPrimaryKeyCols :: ![PGCol]
|
||||
, tiViewInfo :: !(Maybe ViewInfo)
|
||||
, tiEventTriggerInfoMap :: !EventTriggerInfoMap
|
||||
{ _tiName :: !QualifiedTable
|
||||
, _tiSystemDefined :: !Bool
|
||||
, _tiFieldInfoMap :: !(FieldInfoMap columnInfo)
|
||||
, _tiRolePermInfoMap :: !RolePermInfoMap
|
||||
, _tiUniqOrPrimConstraints :: ![ConstraintName]
|
||||
, _tiPrimaryKeyCols :: ![PGCol]
|
||||
, _tiViewInfo :: !(Maybe ViewInfo)
|
||||
, _tiEventTriggerInfoMap :: !EventTriggerInfoMap
|
||||
, _tiEnumValues :: !(Maybe EnumValues)
|
||||
} deriving (Show, Eq)
|
||||
|
||||
$(deriveToJSON (aesonDrop 2 snakeCase) ''TableInfo)
|
||||
|
||||
instance FromJSON TableInfo where
|
||||
parseJSON = withObject "TableInfo" $ \o -> do
|
||||
name <- o .: "name"
|
||||
columns <- o .: "columns"
|
||||
pkeyCols <- o .: "primary_key_columns"
|
||||
constraints <- o .: "constraints"
|
||||
viewInfoM <- o .:? "view_info"
|
||||
isSystemDefined <- o .:? "is_system_defined" .!= False
|
||||
let colMap = M.fromList $ flip map columns $
|
||||
\c -> (fromPGCol $ pgiName c, FIColumn c)
|
||||
return $ TableInfo name isSystemDefined colMap mempty
|
||||
constraints pkeyCols viewInfoM mempty
|
||||
$(makeLenses ''TableInfo)
|
||||
|
||||
data FunctionType
|
||||
= FTVOLATILE
|
||||
@ -398,7 +379,7 @@ data FunctionInfo
|
||||
|
||||
$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionInfo)
|
||||
|
||||
type TableCache = M.HashMap QualifiedTable TableInfo -- info of all tables
|
||||
type TableCache columnInfo = M.HashMap QualifiedTable (TableInfo columnInfo) -- info of all tables
|
||||
type FunctionCache = M.HashMap QualifiedFunction FunctionInfo -- info of all functions
|
||||
|
||||
data RemoteSchemaCtx
|
||||
@ -443,7 +424,7 @@ incSchemaCacheVer (SchemaCacheVer prev) =
|
||||
|
||||
data SchemaCache
|
||||
= SchemaCache
|
||||
{ scTables :: !TableCache
|
||||
{ scTables :: !(TableCache PGColInfo)
|
||||
, scFunctions :: !FunctionCache
|
||||
, scRemoteSchemas :: !RemoteSchemaMap
|
||||
, scAllowlist :: !(HS.HashSet GQLQuery)
|
||||
@ -486,19 +467,19 @@ emptySchemaCache =
|
||||
SchemaCache M.empty M.empty M.empty
|
||||
HS.empty M.empty GC.emptyGCtx mempty []
|
||||
|
||||
modTableCache :: (CacheRWM m) => TableCache -> m ()
|
||||
modTableCache :: (CacheRWM m) => TableCache PGColInfo -> m ()
|
||||
modTableCache tc = do
|
||||
sc <- askSchemaCache
|
||||
writeSchemaCache $ sc { scTables = tc }
|
||||
|
||||
addTableToCache :: (QErrM m, CacheRWM m)
|
||||
=> TableInfo -> m ()
|
||||
=> TableInfo PGColInfo -> m ()
|
||||
addTableToCache ti = do
|
||||
sc <- askSchemaCache
|
||||
assertTableNotExists tn sc
|
||||
modTableCache $ M.insert tn ti $ scTables sc
|
||||
where
|
||||
tn = tiName ti
|
||||
tn = _tiName ti
|
||||
|
||||
delTableFromCache :: (QErrM m, CacheRWM m)
|
||||
=> QualifiedTable -> m ()
|
||||
@ -514,7 +495,7 @@ delTableFromCache tn = do
|
||||
getTableInfoFromCache :: (QErrM m)
|
||||
=> QualifiedTable
|
||||
-> SchemaCache
|
||||
-> m TableInfo
|
||||
-> m (TableInfo PGColInfo)
|
||||
getTableInfoFromCache tn sc =
|
||||
case M.lookup tn (scTables sc) of
|
||||
Nothing -> throw500 $ "table not found in cache : " <>> tn
|
||||
@ -530,7 +511,7 @@ assertTableNotExists tn sc =
|
||||
Just _ -> throw500 $ "table exists in cache : " <>> tn
|
||||
|
||||
modTableInCache :: (QErrM m, CacheRWM m)
|
||||
=> (TableInfo -> m TableInfo)
|
||||
=> (TableInfo PGColInfo -> m (TableInfo PGColInfo))
|
||||
-> QualifiedTable
|
||||
-> m ()
|
||||
modTableInCache f tn = do
|
||||
@ -558,17 +539,17 @@ addRelToCache rn ri deps tn = do
|
||||
|
||||
addFldToCache
|
||||
:: (QErrM m, CacheRWM m)
|
||||
=> FieldName -> FieldInfo
|
||||
=> FieldName -> FieldInfo PGColInfo
|
||||
-> QualifiedTable -> m ()
|
||||
addFldToCache fn fi =
|
||||
modTableInCache modFieldInfoMap
|
||||
where
|
||||
modFieldInfoMap ti = do
|
||||
let fim = tiFieldInfoMap ti
|
||||
let fim = _tiFieldInfoMap ti
|
||||
case M.lookup fn fim of
|
||||
Just _ -> throw500 "field already exists "
|
||||
Nothing -> return $
|
||||
ti { tiFieldInfoMap = M.insert fn fi fim }
|
||||
ti { _tiFieldInfoMap = M.insert fn fi fim }
|
||||
|
||||
delFldFromCache :: (QErrM m, CacheRWM m)
|
||||
=> FieldName -> QualifiedTable -> m ()
|
||||
@ -576,10 +557,10 @@ delFldFromCache fn =
|
||||
modTableInCache modFieldInfoMap
|
||||
where
|
||||
modFieldInfoMap ti = do
|
||||
let fim = tiFieldInfoMap ti
|
||||
let fim = _tiFieldInfoMap ti
|
||||
case M.lookup fn fim of
|
||||
Just _ -> return $
|
||||
ti { tiFieldInfoMap = M.delete fn fim }
|
||||
ti { _tiFieldInfoMap = M.delete fn fim }
|
||||
Nothing -> throw500 "field does not exist"
|
||||
|
||||
delColFromCache :: (QErrM m, CacheRWM m)
|
||||
@ -639,8 +620,8 @@ addEventTriggerToCache qt eti deps = do
|
||||
where
|
||||
trn = etiName eti
|
||||
modEventTriggerInfo ti = do
|
||||
let etim = tiEventTriggerInfoMap ti
|
||||
return $ ti { tiEventTriggerInfoMap = M.insert trn eti etim}
|
||||
let etim = _tiEventTriggerInfoMap ti
|
||||
return $ ti { _tiEventTriggerInfoMap = M.insert trn eti etim}
|
||||
schObjId = SOTableObj qt $ TOTrigger trn
|
||||
|
||||
delEventTriggerFromCache
|
||||
@ -653,8 +634,8 @@ delEventTriggerFromCache qt trn = do
|
||||
modDepMapInCache (removeFromDepMap schObjId)
|
||||
where
|
||||
modEventTriggerInfo ti = do
|
||||
let etim = tiEventTriggerInfoMap ti
|
||||
return $ ti { tiEventTriggerInfoMap = M.delete trn etim }
|
||||
let etim = _tiEventTriggerInfoMap ti
|
||||
return $ ti { _tiEventTriggerInfoMap = M.delete trn etim }
|
||||
schObjId = SOTableObj qt $ TOTrigger trn
|
||||
|
||||
addFunctionToCache
|
||||
@ -713,11 +694,11 @@ addPermToCache tn rn pa i deps = do
|
||||
where
|
||||
paL = permAccToLens pa
|
||||
modRolePermInfo ti = do
|
||||
let rpim = tiRolePermInfoMap ti
|
||||
let rpim = _tiRolePermInfoMap ti
|
||||
rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim
|
||||
newRPI = rpi & paL ?~ i
|
||||
assertPermNotExists pa rpi
|
||||
return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim }
|
||||
return $ ti { _tiRolePermInfoMap = M.insert rn newRPI rpim }
|
||||
schObjId = SOTableObj tn $ TOPerm rn $ permAccToType pa
|
||||
|
||||
assertPermNotExists
|
||||
@ -746,11 +727,11 @@ delPermFromCache pa rn tn = do
|
||||
where
|
||||
paL = permAccToLens pa
|
||||
modRolePermInfo ti = do
|
||||
let rpim = tiRolePermInfoMap ti
|
||||
let rpim = _tiRolePermInfoMap ti
|
||||
rpi = fromMaybe mkRolePermInfo $ M.lookup rn rpim
|
||||
assertPermExists pa rpi
|
||||
let newRPI = rpi & paL .~ Nothing
|
||||
return $ ti { tiRolePermInfoMap = M.insert rn newRPI rpim }
|
||||
return $ ti { _tiRolePermInfoMap = M.insert rn newRPI rpim }
|
||||
schObjId = SOTableObj tn $ TOPerm rn $ permAccToType pa
|
||||
|
||||
addRemoteSchemaToCache
|
||||
|
@ -225,23 +225,23 @@ newtype TypeAnn
|
||||
= TypeAnn {unTypeAnn :: T.Text}
|
||||
deriving (Show, Eq, Data)
|
||||
|
||||
mkTypeAnn :: PgType -> TypeAnn
|
||||
mkTypeAnn :: PGType PGScalarType -> TypeAnn
|
||||
mkTypeAnn = TypeAnn . toSQLTxt
|
||||
|
||||
intTypeAnn :: TypeAnn
|
||||
intTypeAnn = mkTypeAnn $ PgTypeSimple PGInteger
|
||||
intTypeAnn = mkTypeAnn $ PGTypeSimple PGInteger
|
||||
|
||||
textTypeAnn :: TypeAnn
|
||||
textTypeAnn = mkTypeAnn $ PgTypeSimple PGText
|
||||
textTypeAnn = mkTypeAnn $ PGTypeSimple PGText
|
||||
|
||||
textArrTypeAnn :: TypeAnn
|
||||
textArrTypeAnn = mkTypeAnn $ PgTypeArray PGText
|
||||
textArrTypeAnn = mkTypeAnn $ PGTypeArray PGText
|
||||
|
||||
jsonTypeAnn :: TypeAnn
|
||||
jsonTypeAnn = mkTypeAnn $ PgTypeSimple PGJSON
|
||||
jsonTypeAnn = mkTypeAnn $ PGTypeSimple PGJSON
|
||||
|
||||
jsonbTypeAnn :: TypeAnn
|
||||
jsonbTypeAnn = mkTypeAnn $ PgTypeSimple PGJSONB
|
||||
jsonbTypeAnn = mkTypeAnn $ PGTypeSimple PGJSONB
|
||||
|
||||
data CountType
|
||||
= CTStar
|
||||
@ -266,6 +266,7 @@ instance ToSQL TupleExp where
|
||||
|
||||
data SQLExp
|
||||
= SEPrep !Int
|
||||
| SENull
|
||||
| SELit !T.Text
|
||||
| SEUnsafe !T.Text
|
||||
| SESelect !Select
|
||||
@ -286,7 +287,7 @@ data SQLExp
|
||||
deriving (Show, Eq, Data)
|
||||
|
||||
withTyAnn :: PGScalarType -> SQLExp -> SQLExp
|
||||
withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PgTypeSimple colTy
|
||||
withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeSimple colTy
|
||||
|
||||
instance J.ToJSON SQLExp where
|
||||
toJSON = J.toJSON . toSQLTxt
|
||||
@ -310,6 +311,8 @@ countStar = SECount CTStar
|
||||
instance ToSQL SQLExp where
|
||||
toSQL (SEPrep argNumber) =
|
||||
TB.char '$' <> fromString (show argNumber)
|
||||
toSQL SENull =
|
||||
TB.text "null"
|
||||
toSQL (SELit tv) =
|
||||
TB.text $ pgFmtLit tv
|
||||
toSQL (SEUnsafe t) =
|
||||
|
95 server/src-lib/Hasura/SQL/Error.hs Normal file
@ -0,0 +1,95 @@
-- | Functions and datatypes for interpreting Postgres errors.
module Hasura.SQL.Error where

import Hasura.Prelude

import Control.Lens.TH (makePrisms)

import qualified Data.Text as T
import qualified Database.PG.Query.Connection as Q

-- | The top-level error code type. Errors in Postgres are divided into different /classes/, which
-- are further subdivided into individual error codes. Even if a particular status code is not known
-- to the application, it’s possible to determine its class and handle it appropriately.
data PgErrorType
  = PgDataException !(Maybe (PgErrorCode PgDataException))
  | PgIntegrityConstraintViolation !(Maybe (PgErrorCode PgIntegrityConstraintViolation))
  | PgSyntaxErrorOrAccessRuleViolation !(Maybe (PgErrorCode PgSyntaxErrorOrAccessRuleViolation))
  deriving (Show, Eq)

data PgErrorCode a
  = PgErrorGeneric
  -- ^ represents errors that have the non-specific @000@ status code
  | PgErrorSpecific !a
  -- ^ represents errors with a known, more specific status code
  deriving (Show, Eq, Functor)

data PgDataException
  = PgInvalidDatetimeFormat
  | PgInvalidParameterValue
  | PgInvalidTextRepresentation
  deriving (Show, Eq)

data PgIntegrityConstraintViolation
  = PgRestrictViolation
  | PgNotNullViolation
  | PgForeignKeyViolation
  | PgUniqueViolation
  | PgCheckViolation
  | PgExclusionViolation
  deriving (Show, Eq)

data PgSyntaxErrorOrAccessRuleViolation
  = PgUndefinedObject
  | PgInvalidColumnReference
  deriving (Show, Eq)

$(makePrisms ''PgErrorType)
$(makePrisms ''PgErrorCode)

pgErrorType :: Q.PGStmtErrDetail -> Maybe PgErrorType
pgErrorType errorDetails = parseTypes =<< Q.edStatusCode errorDetails
  where
    parseTypes fullCodeText = choice
      [ withClass "22" PgDataException
        [ code "007" PgInvalidDatetimeFormat
        , code "023" PgInvalidParameterValue
        , code "P02" PgInvalidTextRepresentation
        ]
      , withClass "23" PgIntegrityConstraintViolation
        [ code "001" PgRestrictViolation
        , code "502" PgNotNullViolation
        , code "503" PgForeignKeyViolation
        , code "505" PgUniqueViolation
        , code "514" PgCheckViolation
        , code "P01" PgExclusionViolation
        ]
      , withClass "42" PgSyntaxErrorOrAccessRuleViolation
        [ code "704" PgUndefinedObject
        , code "P10" PgInvalidColumnReference
        ]
      ]
      where
        (classText, codeText) = T.splitAt 2 fullCodeText

        withClass :: T.Text -> (Maybe a -> b) -> [Maybe a] -> Maybe b
        withClass expectedClassText mkClass codes =
          guard (classText == expectedClassText) $> mkClass (choice codes)

        code :: T.Text -> a -> Maybe (PgErrorCode a)
        code expectedCodeText codeValue =
          guard (codeText == expectedCodeText) $> PgErrorSpecific codeValue

pgErrorToText :: Q.PGStmtErrDetail -> T.Text
pgErrorToText errorDetail =
  fromMaybe "postgres error" (Q.edMessage errorDetail)
    <> maybe "" formatDescription (Q.edDescription errorDetail)
    <> maybe "" formatHint (Q.edHint errorDetail)
  where
    formatDescription description = ";\n" <> prefixLines "  " description
    formatHint hint = "\n  hint: " <> prefixLinesExceptFirst "  " hint

    prefixLinesExceptFirst prefix content =
      T.intercalate ("\n" <> prefix) (T.lines content)
    prefixLines prefix content =
      prefix <> prefixLinesExceptFirst prefix content
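The class/code split in pgErrorType above is the crux: the first two characters of the five-character SQLSTATE pick the error class, and the remaining three pick the specific condition within that class. A self-contained sketch of just that dispatch — the mapping below is a small subset, and classifySqlState is an invented name:

{-# LANGUAGE OverloadedStrings #-}

import           Data.Text (Text)
import qualified Data.Text as T

classifySqlState :: Text -> Maybe Text
classifySqlState fullCode = case T.splitAt 2 fullCode of
  ("22", _)     -> Just "data exception"
  ("23", "503") -> Just "integrity constraint violation (foreign key)"
  ("23", _)     -> Just "integrity constraint violation"
  ("42", _)     -> Just "syntax error or access rule violation"
  _             -> Nothing  -- class unknown to the application

main :: IO ()
main = mapM_ (print . classifySqlState) ["23503", "22007", "99999"]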
@ -146,6 +146,7 @@ uOrderBy (S.OrderByExp ordByItems) =
|
||||
uSqlExp :: S.SQLExp -> Uniq S.SQLExp
|
||||
uSqlExp = restoringIdens . \case
|
||||
S.SEPrep i -> return $ S.SEPrep i
|
||||
S.SENull -> return S.SENull
|
||||
S.SELit t -> return $ S.SELit t
|
||||
S.SEUnsafe t -> return $ S.SEUnsafe t
|
||||
S.SESelect s -> S.SESelect <$> uSelect s
|
||||
|
@ -7,6 +7,7 @@ import Hasura.Prelude
|
||||
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Encoding (text)
|
||||
import Data.Aeson.TH
|
||||
import Data.Aeson.Types (toJSONKeyText)
|
||||
import Instances.TH.Lift ()
|
||||
import Language.Haskell.TH.Syntax (Lift)
|
||||
@ -392,9 +393,15 @@ isNumType PGDouble = True
|
||||
isNumType PGNumeric = True
|
||||
isNumType ty = isIntegerType ty
|
||||
|
||||
isJSONBType :: PGScalarType -> Bool
|
||||
isJSONBType PGJSONB = True
|
||||
isJSONBType _ = False
|
||||
stringTypes :: [PGScalarType]
|
||||
stringTypes = [PGVarchar, PGText]
|
||||
isStringType :: PGScalarType -> Bool
|
||||
isStringType = (`elem` stringTypes)
|
||||
|
||||
jsonTypes :: [PGScalarType]
|
||||
jsonTypes = [PGJSON, PGJSONB]
|
||||
isJSONType :: PGScalarType -> Bool
|
||||
isJSONType = (`elem` jsonTypes)
|
||||
|
||||
isComparableType :: PGScalarType -> Bool
|
||||
isComparableType PGJSON = False
|
||||
@ -413,26 +420,33 @@ isBigNum = \case
|
||||
PGDouble -> True
|
||||
_ -> False
|
||||
|
||||
geoTypes :: [PGScalarType]
|
||||
geoTypes = [PGGeometry, PGGeography]
|
||||
isGeoType :: PGScalarType -> Bool
|
||||
isGeoType = \case
|
||||
PGGeometry -> True
|
||||
PGGeography -> True
|
||||
_ -> False
|
||||
isGeoType = (`elem` geoTypes)
|
||||
|
||||
-- | The type of all Postgres types (i.e. scalars and arrays).
|
||||
data PGScalarTyped a
|
||||
= PGScalarTyped
|
||||
{ pstType :: !PGScalarType
|
||||
, pstValue :: !a
|
||||
} deriving (Show, Eq, Functor, Foldable, Traversable)
|
||||
|
||||
-- | The type of all Postgres types (i.e. scalars and arrays). This type is parameterized so that
|
||||
-- we can have both @'PGType' 'PGScalarType'@ and @'PGType' 'Hasura.RQL.Types.PGColumnType'@, for
|
||||
-- when we care about the distinction made by 'Hasura.RQL.Types.PGColumnType'. If we ever change
|
||||
-- 'Hasura.RQL.Types.PGColumnType' to handle arrays, not just scalars, then the parameterization can
|
||||
-- go away.
|
||||
--
|
||||
-- TODO: This is incorrect modeling, as 'PGScalarType' will capture anything (under 'PGUnknown').
|
||||
-- This should be fixed when support for all types is merged.
|
||||
data PgType
|
||||
= PgTypeSimple !PGScalarType
|
||||
| PgTypeArray !PGScalarType
|
||||
deriving (Show, Eq, Data)
|
||||
data PGType a
|
||||
= PGTypeSimple !a
|
||||
| PGTypeArray !a
|
||||
deriving (Show, Eq, Data, Functor)
|
||||
$(deriveJSON defaultOptions{constructorTagModifier = drop 6} ''PGType)
|
||||
|
||||
instance ToSQL PgType where
|
||||
instance (ToSQL a) => ToSQL (PGType a) where
|
||||
toSQL = \case
|
||||
PgTypeSimple ty -> toSQL ty
|
||||
PGTypeSimple ty -> toSQL ty
|
||||
-- typename array is an sql standard way of declaring types
|
||||
PgTypeArray ty -> toSQL ty <> " array"
|
||||
|
||||
instance ToJSON PgType where
|
||||
toJSON = toJSON . toSQLTxt
|
||||
PGTypeArray ty -> toSQL ty <> " array"
|
||||
|
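The parameterization introduced above is easiest to see through the ToSQL instance: a simple type renders as its own name, and an array type renders as "<typename> array". A standalone sketch that mirrors, but does not import, the engine's PGType (the String stand-in for PGScalarType is a simplification):

{-# LANGUAGE DeriveFunctor #-}

data PGType a = PGTypeSimple !a | PGTypeArray !a
  deriving (Show, Eq, Functor)

toSQLTxt :: PGType String -> String
toSQLTxt (PGTypeSimple ty) = ty
toSQLTxt (PGTypeArray ty)  = ty ++ " array"  -- SQL-standard array declaration

main :: IO ()
main = mapM_ (putStrLn . toSQLTxt) [PGTypeSimple "text", PGTypeArray "text"]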
@ -136,50 +136,29 @@ textToPrepVal t =
|
||||
parsePGValue' :: PGScalarType
|
||||
-> Value
|
||||
-> AT.Parser PGColValue
|
||||
parsePGValue' ty Null =
|
||||
return $ PGNull ty
|
||||
parsePGValue' PGSmallInt val =
|
||||
PGValSmallInt <$> parseJSON val
|
||||
parsePGValue' PGInteger val =
|
||||
PGValInteger <$> parseJSON val
|
||||
parsePGValue' PGBigInt val =
|
||||
PGValBigInt <$> parseJSON val
|
||||
parsePGValue' PGSerial val =
|
||||
PGValInteger <$> parseJSON val
|
||||
parsePGValue' PGBigSerial val =
|
||||
PGValBigInt <$> parseJSON val
|
||||
parsePGValue' PGFloat val =
|
||||
PGValFloat <$> parseJSON val
|
||||
parsePGValue' PGDouble val =
|
||||
PGValDouble <$> parseJSON val
|
||||
parsePGValue' PGNumeric val =
|
||||
PGValNumeric <$> parseJSON val
|
||||
parsePGValue' PGBoolean val =
|
||||
PGValBoolean <$> parseJSON val
|
||||
parsePGValue' PGChar val =
|
||||
PGValChar <$> parseJSON val
|
||||
parsePGValue' PGVarchar val =
|
||||
PGValVarchar <$> parseJSON val
|
||||
parsePGValue' PGText val =
|
||||
PGValText <$> parseJSON val
|
||||
parsePGValue' PGDate val =
|
||||
PGValDate <$> parseJSON val
|
||||
parsePGValue' PGTimeStampTZ val =
|
||||
PGValTimeStampTZ <$> parseJSON val
|
||||
parsePGValue' PGTimeTZ val =
|
||||
PGValTimeTZ <$> parseJSON val
|
||||
parsePGValue' PGJSON val =
|
||||
PGValJSON . Q.JSON <$> parseJSON val
|
||||
parsePGValue' PGJSONB val =
|
||||
PGValJSONB . Q.JSONB <$> parseJSON val
|
||||
parsePGValue' PGGeometry val =
|
||||
PGValGeo <$> parseJSON val
|
||||
parsePGValue' PGGeography val =
|
||||
PGValGeo <$> parseJSON val
|
||||
parsePGValue' (PGUnknown _) (String t) =
|
||||
return $ PGValUnknown t
|
||||
parsePGValue' (PGUnknown tyName) _ =
|
||||
fail $ "A string is expected for type : " ++ T.unpack tyName
|
||||
parsePGValue' ty v = case (ty, v) of
|
||||
(_, Null) -> return $ PGNull ty
|
||||
(PGSmallInt, val) -> PGValSmallInt <$> parseJSON val
|
||||
(PGInteger, val) -> PGValInteger <$> parseJSON val
|
||||
(PGBigInt, val) -> PGValBigInt <$> parseJSON val
|
||||
(PGSerial, val) -> PGValInteger <$> parseJSON val
|
||||
(PGBigSerial, val) -> PGValBigInt <$> parseJSON val
|
||||
(PGFloat, val) -> PGValFloat <$> parseJSON val
|
||||
(PGDouble, val) -> PGValDouble <$> parseJSON val
|
||||
(PGNumeric, val) -> PGValNumeric <$> parseJSON val
|
||||
(PGBoolean, val) -> PGValBoolean <$> parseJSON val
|
||||
(PGChar, val) -> PGValChar <$> parseJSON val
|
||||
(PGVarchar, val) -> PGValVarchar <$> parseJSON val
|
||||
(PGText, val) -> PGValText <$> parseJSON val
|
||||
(PGDate, val) -> PGValDate <$> parseJSON val
|
||||
(PGTimeStampTZ, val) -> PGValTimeStampTZ <$> parseJSON val
|
||||
(PGTimeTZ, val) -> PGValTimeTZ <$> parseJSON val
|
||||
(PGJSON, val) -> PGValJSON . Q.JSON <$> parseJSON val
|
||||
(PGJSONB, val) -> PGValJSONB . Q.JSONB <$> parseJSON val
|
||||
(PGGeometry, val) -> PGValGeo <$> parseJSON val
|
||||
(PGGeography, val) -> PGValGeo <$> parseJSON val
|
||||
(PGUnknown _, String t) -> return $ PGValUnknown t
|
||||
(PGUnknown tyName, _) -> fail $ "A string is expected for type : " ++ T.unpack tyName
|
||||
|
||||
parsePGValue :: PGScalarType -> Value -> AT.Parser PGColValue
|
||||
parsePGValue pct val =
|
||||
@ -187,18 +166,6 @@ parsePGValue pct val =
|
||||
String t -> parsePGValue' pct val <|> return (PGValUnknown t)
|
||||
_ -> parsePGValue' pct val
|
||||
|
||||
convToBin :: PGScalarType
|
||||
-> Value
|
||||
-> AT.Parser Q.PrepArg
|
||||
convToBin ty val =
|
||||
binEncoder <$> parsePGValue ty val
|
||||
|
||||
convToTxt :: PGScalarType
|
||||
-> Value
|
||||
-> AT.Parser S.SQLExp
|
||||
convToTxt ty val =
|
||||
toTxtValue ty <$> parsePGValue ty val
|
||||
|
||||
readEitherTxt :: (Read a) => T.Text -> Either String a
|
||||
readEitherTxt = readEither . T.unpack
|
||||
|
||||
@ -210,26 +177,19 @@ pgValFromJVal :: (FromJSON a) => Value -> Either String a
|
||||
pgValFromJVal = iresToEither . ifromJSON
|
||||
|
||||
withGeoVal :: PGScalarType -> S.SQLExp -> S.SQLExp
|
||||
withGeoVal ty v =
|
||||
bool v applyGeomFromGeoJson isGeoTy
|
||||
where
|
||||
applyGeomFromGeoJson =
|
||||
S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing
|
||||
|
||||
isGeoTy = case ty of
|
||||
PGGeometry -> True
|
||||
PGGeography -> True
|
||||
_ -> False
|
||||
withGeoVal ty v
|
||||
| isGeoType ty = S.SEFnApp "ST_GeomFromGeoJSON" [v] Nothing
|
||||
| otherwise = v
|
||||
|
||||
toPrepParam :: Int -> PGScalarType -> S.SQLExp
|
||||
toPrepParam i ty =
|
||||
withGeoVal ty $ S.SEPrep i
|
||||
|
||||
toTxtValue :: PGScalarType -> PGColValue -> S.SQLExp
|
||||
toTxtValue ty val =
|
||||
S.withTyAnn ty txtVal
|
||||
where
|
||||
txtVal = withGeoVal ty $ txtEncoder val
|
||||
toBinaryValue :: PGScalarTyped PGColValue -> Q.PrepArg
|
||||
toBinaryValue = binEncoder . pstValue
|
||||
|
||||
toTxtValue :: PGScalarTyped PGColValue -> S.SQLExp
|
||||
toTxtValue (PGScalarTyped ty val) = S.withTyAnn ty . withGeoVal ty $ txtEncoder val
|
||||
|
||||
pgColValueToInt :: PGColValue -> Maybe Int
|
||||
pgColValueToInt (PGValInteger i) = Just $ fromIntegral i
|
||||
|
@ -656,11 +656,11 @@ connInfoErrModifier :: String -> String
|
||||
connInfoErrModifier s = "Fatal Error : " ++ s
|
||||
|
||||
mkConnInfo ::RawConnInfo -> Either String Q.ConnInfo
|
||||
mkConnInfo (RawConnInfo mHost mPort mUser pass mURL mDB opts mRetries) =
|
||||
mkConnInfo (RawConnInfo mHost mPort mUser password mURL mDB opts mRetries) =
|
||||
case (mHost, mPort, mUser, mDB, mURL) of
|
||||
|
||||
(Just host, Just port, Just user, Just db, Nothing) ->
|
||||
return $ Q.ConnInfo host port user pass db opts retries
|
||||
return $ Q.ConnInfo host port user password db opts retries
|
||||
|
||||
(_, _, _, _, Just dbURL) -> maybe (throwError invalidUrlMsg)
|
||||
withRetries $ parseDatabaseUrl dbURL opts
|
||||
|
@ -33,6 +33,7 @@ data RQLQuery
|
||||
= RQAddExistingTableOrView !TrackTable
|
||||
| RQTrackTable !TrackTable
|
||||
| RQUntrackTable !UntrackTable
|
||||
| RQSetTableIsEnum !SetTableIsEnum
|
||||
|
||||
| RQTrackFunction !TrackFunction
|
||||
| RQUntrackFunction !UnTrackFunction
|
||||
@ -173,6 +174,7 @@ queryNeedsReload qi = case qi of
|
||||
RQUntrackTable _ -> True
|
||||
RQTrackFunction _ -> True
|
||||
RQUntrackFunction _ -> True
|
||||
RQSetTableIsEnum _ -> True
|
||||
|
||||
RQCreateObjectRelationship _ -> True
|
||||
RQCreateArrayRelationship _ -> True
|
||||
@ -242,6 +244,7 @@ runQueryM rq =
|
||||
RQAddExistingTableOrView q -> runTrackTableQ q
|
||||
RQTrackTable q -> runTrackTableQ q
|
||||
RQUntrackTable q -> runUntrackTableQ q
|
||||
RQSetTableIsEnum q -> runSetExistingTableIsEnumQ q
|
||||
|
||||
RQTrackFunction q -> runTrackFunc q
|
||||
RQUntrackFunction q -> runUntrackFunc q
|
||||
|
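Assuming the snake_case constructor tagging used for the other RQL query constructors, the new RQSetTableIsEnum presumably corresponds to a metadata call shaped roughly like the following; the table name is made up and the exact args shape is an assumption based on the SetTableIsEnum type, not something this diff shows:

POST /v1/query

{
  "type": "set_table_is_enum",
  "args": {
    "table": { "schema": "public", "name": "colors" },
    "is_enum": true
  }
}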
@ -128,8 +128,8 @@ runTelemetry (Logger logger) manager cacheRef dbId instanceId = do
|
||||
|
||||
computeMetrics :: SchemaCache -> Metrics
|
||||
computeMetrics sc =
|
||||
let nTables = Map.size $ Map.filter (isNothing . tiViewInfo) usrTbls
|
||||
nViews = Map.size $ Map.filter (isJust . tiViewInfo) usrTbls
|
||||
let nTables = Map.size $ Map.filter (isNothing . _tiViewInfo) usrTbls
|
||||
nViews = Map.size $ Map.filter (isJust . _tiViewInfo) usrTbls
|
||||
allRels = join $ Map.elems $ Map.map relsOfTbl usrTbls
|
||||
(manualRels, autoRels) = partition riIsManual allRels
|
||||
relMetrics = RelationshipMetric (length manualRels) (length autoRels)
|
||||
@ -143,23 +143,23 @@ computeMetrics sc =
|
||||
permMetrics =
|
||||
PermissionMetric selPerms insPerms updPerms delPerms nRoles
|
||||
evtTriggers = Map.size $ Map.filter (not . Map.null)
|
||||
$ Map.map tiEventTriggerInfoMap usrTbls
|
||||
$ Map.map _tiEventTriggerInfoMap usrTbls
|
||||
rmSchemas = Map.size $ scRemoteSchemas sc
|
||||
funcs = Map.size $ Map.filter (not . fiSystemDefined) $ scFunctions sc
|
||||
|
||||
in Metrics nTables nViews relMetrics permMetrics evtTriggers rmSchemas funcs
|
||||
|
||||
where
|
||||
usrTbls = Map.filter (not . tiSystemDefined) $ scTables sc
|
||||
usrTbls = Map.filter (not . _tiSystemDefined) $ scTables sc
|
||||
|
||||
calcPerms :: (RolePermInfo -> Maybe a) -> [RolePermInfo] -> Int
|
||||
calcPerms fn perms = length $ catMaybes $ map fn perms
|
||||
|
||||
relsOfTbl :: TableInfo -> [RelInfo]
|
||||
relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . tiFieldInfoMap
|
||||
relsOfTbl :: TableInfo PGColInfo -> [RelInfo]
|
||||
relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . _tiFieldInfoMap
|
||||
|
||||
permsOfTbl :: TableInfo -> [(RoleName, RolePermInfo)]
|
||||
permsOfTbl = Map.toList . tiRolePermInfoMap
|
||||
permsOfTbl :: TableInfo PGColInfo -> [(RoleName, RolePermInfo)]
|
||||
permsOfTbl = Map.toList . _tiRolePermInfoMap
|
||||
|
||||
|
||||
getDbId :: Q.TxE QErr Text
|
||||
|
@ -14,38 +14,28 @@ from
|
||||
select
|
||||
coalesce(json_agg(
|
||||
json_build_object(
|
||||
'table',
|
||||
json_build_object(
|
||||
'name', json_build_object(
|
||||
'name', ht.table_name,
|
||||
'schema', ht.table_schema
|
||||
),
|
||||
'system_defined', ht.is_system_defined,
|
||||
'info', tables.info
|
||||
'is_enum', ht.is_enum,
|
||||
'is_system_defined', ht.is_system_defined,
|
||||
'info', t.info
|
||||
)
|
||||
), '[]') as items
|
||||
from
|
||||
hdb_catalog.hdb_table as ht
|
||||
left outer join (
|
||||
select
|
||||
table_schema,
|
||||
table_name,
|
||||
json_build_object(
|
||||
'name',
|
||||
json_build_object(
|
||||
'schema', table_schema,
|
||||
'name', table_name
|
||||
),
|
||||
'columns', columns,
|
||||
'primary_key_columns', primary_key_columns,
|
||||
'constraints', constraints,
|
||||
'view_info', view_info
|
||||
) as info
|
||||
from
|
||||
hdb_catalog.hdb_table_info_agg
|
||||
) as tables on (
|
||||
tables.table_schema = ht.table_schema
|
||||
and tables.table_name = ht.table_name
|
||||
)
|
||||
from hdb_catalog.hdb_table as ht
|
||||
left outer join (
|
||||
select
|
||||
table_schema,
|
||||
table_name,
|
||||
jsonb_build_object(
|
||||
'columns', columns,
|
||||
'primary_key_columns', primary_key_columns,
|
||||
'constraints', constraints,
|
||||
'view_info', view_info
|
||||
) as info
|
||||
from hdb_catalog.hdb_table_info_agg
|
||||
) as t using (table_schema, table_name)
|
||||
) as tables,
|
||||
(
|
||||
select
|
||||
|
@ -14,26 +14,11 @@ CREATE TABLE hdb_catalog.hdb_table
|
||||
table_schema TEXT,
|
||||
table_name TEXT,
|
||||
is_system_defined boolean default false,
|
||||
is_enum boolean NOT NULL DEFAULT false,
|
||||
|
||||
PRIMARY KEY (table_schema, table_name)
|
||||
);
|
||||
|
||||
CREATE FUNCTION hdb_catalog.hdb_table_oid_check() RETURNS trigger AS
|
||||
$function$
|
||||
BEGIN
|
||||
IF (EXISTS (SELECT 1 FROM information_schema.tables st WHERE st.table_schema = NEW.table_schema AND st.table_name = NEW.table_name)) THEN
|
||||
return NEW;
|
||||
ELSE
|
||||
RAISE foreign_key_violation using message = 'table_schema, table_name not in information_schema.tables';
|
||||
return NULL;
|
||||
END IF;
|
||||
END;
|
||||
$function$
|
||||
LANGUAGE plpgsql;
|
||||
|
||||
CREATE TRIGGER hdb_table_oid_check BEFORE INSERT OR UPDATE ON hdb_catalog.hdb_table
|
||||
FOR EACH ROW EXECUTE PROCEDURE hdb_catalog.hdb_table_oid_check();
|
||||
|
||||
CREATE TABLE hdb_catalog.hdb_relationship
|
||||
(
|
||||
table_schema TEXT,
|
||||
@ -83,7 +68,9 @@ SELECT
|
||||
min(q.ref_table) :: text as ref_table,
|
||||
json_object_agg(ac.attname, afc.attname) as column_mapping,
|
||||
min(q.confupdtype) :: text as on_update,
|
||||
min(q.confdeltype) :: text as on_delete
|
||||
min(q.confdeltype) :: text as on_delete,
|
||||
json_agg(ac.attname) as columns,
|
||||
json_agg(afc.attname) as ref_columns
|
||||
FROM
|
||||
(SELECT
|
||||
ctn.nspname AS table_schema,
|
||||
@ -431,6 +418,37 @@ CREATE TRIGGER hdb_schema_update_event_notifier AFTER INSERT OR UPDATE ON
|
||||
hdb_catalog.hdb_schema_update_event FOR EACH ROW EXECUTE PROCEDURE
|
||||
hdb_catalog.hdb_schema_update_event_notifier();
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_column AS
|
||||
WITH primary_key_references AS (
|
||||
SELECT fkey.table_schema AS src_table_schema
|
||||
, fkey.table_name AS src_table_name
|
||||
, fkey.columns->>0 AS src_column_name
|
||||
, json_agg(json_build_object(
|
||||
'schema', fkey.ref_table_table_schema,
|
||||
'name', fkey.ref_table
|
||||
)) AS ref_tables
|
||||
FROM hdb_catalog.hdb_foreign_key_constraint AS fkey
|
||||
JOIN hdb_catalog.hdb_primary_key AS pkey
|
||||
ON pkey.table_schema = fkey.ref_table_table_schema
|
||||
AND pkey.table_name = fkey.ref_table
|
||||
AND pkey.columns::jsonb = fkey.ref_columns::jsonb
|
||||
WHERE json_array_length(fkey.columns) = 1
|
||||
GROUP BY fkey.table_schema
|
||||
, fkey.table_name
|
||||
, fkey.columns->>0)
|
||||
SELECT columns.table_schema
|
||||
, columns.table_name
|
||||
, columns.column_name AS name
|
||||
, columns.udt_name AS type
|
||||
, columns.is_nullable
|
||||
, columns.ordinal_position
|
||||
, coalesce(pkey_refs.ref_tables, '[]') AS primary_key_references
|
||||
FROM information_schema.columns
|
||||
LEFT JOIN primary_key_references AS pkey_refs
|
||||
ON columns.table_schema = pkey_refs.src_table_schema
|
||||
AND columns.table_name = pkey_refs.src_table_name
|
||||
AND columns.column_name = pkey_refs.src_column_name;
|
||||
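An illustrative query against the new view (not part of this commit): primary_key_references is non-empty exactly for the single-column foreign keys into some table's primary key, i.e. the columns that can be resolved to enum references during schema cache generation.

-- Which tracked columns are candidates for enum references?
SELECT table_schema, table_name, name AS column_name, primary_key_references
  FROM hdb_catalog.hdb_column
 WHERE json_array_length(primary_key_references) > 0;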
|
||||
CREATE VIEW hdb_catalog.hdb_table_info_agg AS (
|
||||
select
|
||||
tables.table_name as table_name,
|
||||
@ -447,16 +465,14 @@ from
|
||||
c.table_schema,
|
||||
json_agg(
|
||||
json_build_object(
|
||||
'name',
|
||||
column_name,
|
||||
'type',
|
||||
udt_name,
|
||||
'is_nullable',
|
||||
is_nullable :: boolean
|
||||
'name', name,
|
||||
'type', type,
|
||||
'is_nullable', is_nullable :: boolean,
|
||||
'references', primary_key_references
|
||||
)
|
||||
) as columns
|
||||
from
|
||||
information_schema.columns c
|
||||
hdb_catalog.hdb_column c
|
||||
group by
|
||||
c.table_schema,
|
||||
c.table_name
|
||||
|
158
server/src-rsr/migrate_from_19_to_20.sql
Normal file
158
server/src-rsr/migrate_from_19_to_20.sql
Normal file
@ -0,0 +1,158 @@
|
||||
ALTER TABLE hdb_catalog.hdb_table
|
||||
ADD COLUMN is_enum boolean NOT NULL DEFAULT false;
|
||||
|
||||
DROP TRIGGER hdb_table_oid_check ON hdb_catalog.hdb_table;
|
||||
DROP FUNCTION hdb_catalog.hdb_table_oid_check();
|
||||
|
||||
CREATE OR REPLACE VIEW hdb_catalog.hdb_foreign_key_constraint AS
|
||||
SELECT
|
||||
q.table_schema :: text,
|
||||
q.table_name :: text,
|
||||
q.constraint_name :: text,
|
||||
min(q.constraint_oid) :: integer as constraint_oid,
|
||||
min(q.ref_table_table_schema) :: text as ref_table_table_schema,
|
||||
min(q.ref_table) :: text as ref_table,
|
||||
json_object_agg(ac.attname, afc.attname) as column_mapping,
|
||||
min(q.confupdtype) :: text as on_update,
|
||||
min(q.confdeltype) :: text as on_delete,
|
||||
json_agg(ac.attname) as columns,
|
||||
json_agg(afc.attname) as ref_columns
|
||||
FROM
|
||||
(SELECT
|
||||
ctn.nspname AS table_schema,
|
||||
ct.relname AS table_name,
|
||||
r.conrelid AS table_id,
|
||||
r.conname as constraint_name,
|
||||
r.oid as constraint_oid,
|
||||
cftn.nspname AS ref_table_table_schema,
|
||||
cft.relname as ref_table,
|
||||
r.confrelid as ref_table_id,
|
||||
r.confupdtype,
|
||||
r.confdeltype,
|
||||
UNNEST (r.conkey) AS column_id,
|
||||
UNNEST (r.confkey) AS ref_column_id
|
||||
FROM
|
||||
pg_catalog.pg_constraint r
|
||||
JOIN pg_catalog.pg_class ct
|
||||
ON r.conrelid = ct.oid
|
||||
JOIN pg_catalog.pg_namespace ctn
|
||||
ON ct.relnamespace = ctn.oid
|
||||
JOIN pg_catalog.pg_class cft
|
||||
ON r.confrelid = cft.oid
|
||||
JOIN pg_catalog.pg_namespace cftn
|
||||
ON cft.relnamespace = cftn.oid
|
||||
WHERE
|
||||
r.contype = 'f'
|
||||
) q
|
||||
JOIN pg_catalog.pg_attribute ac
|
||||
ON q.column_id = ac.attnum
|
||||
AND q.table_id = ac.attrelid
|
||||
JOIN pg_catalog.pg_attribute afc
|
||||
ON q.ref_column_id = afc.attnum
|
||||
AND q.ref_table_id = afc.attrelid
|
||||
GROUP BY q.table_schema, q.table_name, q.constraint_name;
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_column AS
|
||||
WITH primary_key_references AS (
|
||||
SELECT fkey.table_schema AS src_table_schema
|
||||
, fkey.table_name AS src_table_name
|
||||
, fkey.columns->>0 AS src_column_name
|
||||
, json_agg(json_build_object(
|
||||
'schema', fkey.ref_table_table_schema,
|
||||
'name', fkey.ref_table
|
||||
)) AS ref_tables
|
||||
FROM hdb_catalog.hdb_foreign_key_constraint AS fkey
|
||||
JOIN hdb_catalog.hdb_primary_key AS pkey
|
||||
ON pkey.table_schema = fkey.ref_table_table_schema
|
||||
AND pkey.table_name = fkey.ref_table
|
||||
AND pkey.columns::jsonb = fkey.ref_columns::jsonb
|
||||
WHERE json_array_length(fkey.columns) = 1
|
||||
GROUP BY fkey.table_schema
|
||||
, fkey.table_name
|
||||
, fkey.columns->>0)
|
||||
SELECT columns.table_schema
|
||||
, columns.table_name
|
||||
, columns.column_name AS name
|
||||
, columns.udt_name AS type
|
||||
, columns.is_nullable
|
||||
, columns.ordinal_position
|
||||
, coalesce(pkey_refs.ref_tables, '[]') AS primary_key_references
|
||||
FROM information_schema.columns
|
||||
LEFT JOIN primary_key_references AS pkey_refs
|
||||
ON columns.table_schema = pkey_refs.src_table_schema
|
||||
AND columns.table_name = pkey_refs.src_table_name
|
||||
AND columns.column_name = pkey_refs.src_column_name;

CREATE OR REPLACE VIEW hdb_catalog.hdb_table_info_agg AS (
  select
    tables.table_name as table_name,
    tables.table_schema as table_schema,
    coalesce(columns.columns, '[]') as columns,
    coalesce(pk.columns, '[]') as primary_key_columns,
    coalesce(constraints.constraints, '[]') as constraints,
    coalesce(views.view_info, 'null') as view_info
  from
    information_schema.tables as tables
    left outer join (
      select
        c.table_name,
        c.table_schema,
        json_agg(
          json_build_object(
            'name', name,
            'type', type,
            'is_nullable', is_nullable :: boolean,
            'references', primary_key_references
          )
        ) as columns
      from
        hdb_catalog.hdb_column c
      group by
        c.table_schema,
        c.table_name
    ) columns on (
      tables.table_schema = columns.table_schema
      AND tables.table_name = columns.table_name
    )
    left outer join (
      select * from hdb_catalog.hdb_primary_key
    ) pk on (
      tables.table_schema = pk.table_schema
      AND tables.table_name = pk.table_name
    )
    left outer join (
      select
        c.table_schema,
        c.table_name,
        json_agg(constraint_name) as constraints
      from
        information_schema.table_constraints c
      where
        c.constraint_type = 'UNIQUE'
        or c.constraint_type = 'PRIMARY KEY'
      group by
        c.table_schema,
        c.table_name
    ) constraints on (
      tables.table_schema = constraints.table_schema
      AND tables.table_name = constraints.table_name
    )
    left outer join (
      select
        table_schema,
        table_name,
        json_build_object(
          'is_updatable',
          (is_updatable::boolean OR is_trigger_updatable::boolean),
          'is_deletable',
          (is_updatable::boolean OR is_trigger_deletable::boolean),
          'is_insertable',
          (is_insertable_into::boolean OR is_trigger_insertable_into::boolean)
        ) as view_info
      from
        information_schema.views v
    ) views on (
      tables.table_schema = views.table_schema
      AND tables.table_name = views.table_name
    )
);
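
-- A minimal illustration (not part of the migration): the aggregate view
-- rolls everything up to one row per relation, so a single query such as
--
--   SELECT columns, primary_key_columns, constraints, view_info
--   FROM hdb_catalog.hdb_table_info_agg
--   WHERE table_schema = 'public' AND table_name = 'users';
--
-- sketches the shape of what schema cache generation reads per table.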

@ -20,21 +20,16 @@ FROM
      table_schema,
      table_name,
      json_agg(
        (
          SELECT
            r
          FROM
            (
              SELECT
                column_name,
                udt_name AS data_type,
                ordinal_position,
                is_nullable :: boolean
            ) r
        json_build_object(
          'column_name', name,
          'data_type', type,
          'is_nullable', is_nullable :: boolean,
          'ordinal_position', ordinal_position,
          'references', primary_key_references
        )
      ) as columns
    FROM
      information_schema.columns
      hdb_catalog.hdb_column
    GROUP BY
      table_schema,
      table_name

@ -32,6 +32,7 @@ extra-deps:

- reroute-0.5.0.0
- Spock-core-0.13.0.0
- monad-validate-1.2.0.0

# Override default flag values for local packages and extra-deps
flags: {}

@ -95,6 +95,13 @@ packages:
      sha256: 86140298020f68bb09d07b26a6a6f1666fc3a02715d7986b09150727247a1a84
  original:
    hackage: Spock-core-0.13.0.0
- completed:
    hackage: monad-validate-1.2.0.0@sha256:9850f408431098b28806dd464b6825a88a0b56c84f380d7fe0454c1df9d6f881,3505
    pantry-tree:
      size: 713
      sha256: 8e049bd12ce2bd470909578f2ee8eb80b89d5ff88860afa30e29dd4eafecfa3e
  original:
    hackage: monad-validate-1.2.0.0
snapshots:
- completed:
    size: 498167

@ -0,0 +1,21 @@
description: Test deleting records filtered by an enum reference
url: /v1/graphql
status: 200
response:
  data:
    delete_users:
      affected_rows: 1
      returning:
      - name: Alyssa
        favorite_color: red
query:
  query: |
    mutation {
      delete_users(where: {favorite_color: {_eq: red}}) {
        affected_rows
        returning {
          name
          favorite_color
        }
      }
    }

@ -0,0 +1,24 @@
description: Test inserting a record that references an enum table
url: /v1/graphql
status: 200
response:
  data:
    insert_users:
      returning:
      - name: Matthew
        favorite_color: yellow
      - name: Robby
        favorite_color: purple
query:
  query: |
    mutation {
      insert_users(objects: [
        { name: "Matthew", favorite_color: yellow },
        { name: "Robby", favorite_color: purple }
      ]) {
        returning {
          name
          favorite_color
        }
      }
    }

@ -0,0 +1,19 @@
description: Test inserting a record with an invalid enum value
url: /v1/graphql
status: 200
response:
  errors:
  - message: 'unexpected value "not_a_real_color" for enum: ''colors_enum'''
    extensions:
      code: validation-failed
      path: $.selectionSet.insert_users.args.objects[0].favorite_color
query:
  query: |
    mutation {
      insert_users(objects: [{ name: "Matthew", favorite_color: not_a_real_color }]) {
        returning {
          name
          favorite_color
        }
      }
    }

@ -0,0 +1,28 @@
type: bulk
args:

- type: run_sql
  args:
    sql: |
      CREATE TABLE colors
      ( value text PRIMARY KEY
      , comment text );
      INSERT INTO colors (value, comment) VALUES
        ('red', '#FF0000'),
        ('green', '#00FF00'),
        ('blue', '#0000FF'),
        ('orange', '#FFFF00'),
        ('yellow', '#00FFFF'),
        ('purple', '#FF00FF');

      CREATE TABLE users
      ( id serial PRIMARY KEY
      , name text NOT NULL
      , favorite_color text NOT NULL REFERENCES colors );

- type: track_table
  args:
    table: colors
    is_enum: true
- type: track_table
  args: users
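
A hedged aside, not part of the diff: per the commit description, each row of
an enum table such as colors above becomes one value of a generated enum type,
with the primary key supplying the value's name and the comment column its
description. A minimal SQL sketch of that mapping, assuming the fixture above:

    -- Illustrative only: how enum-table rows line up with GraphQL enum values.
    SELECT value   AS enum_value_name,        -- e.g. 'red'
           comment AS enum_value_description  -- e.g. '#FF0000'
    FROM colors
    ORDER BY value;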

@ -0,0 +1,8 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      DROP TABLE users;
      DROP TABLE colors;
    cascade: true

@ -0,0 +1,21 @@
description: Test updating a record that references an enum table
url: /v1/graphql
status: 200
response:
  data:
    update_users:
      affected_rows: 1
      returning:
      - name: Alyssa
        favorite_color: blue
query:
  query: |
    mutation {
      update_users(where: {id: {_eq: 1}}, _set: {favorite_color: blue}) {
        affected_rows
        returning {
          name
          favorite_color
        }
      }
    }

@ -0,0 +1,21 @@
description: Test updating records filtered by an enum reference
url: /v1/graphql
status: 200
response:
  data:
    update_users:
      affected_rows: 1
      returning:
      - name: Alyssa
        favorite_color: blue
query:
  query: |
    mutation {
      update_users(where: {favorite_color: {_eq: red}}, _set: {favorite_color: blue}) {
        affected_rows
        returning {
          name
          favorite_color
        }
      }
    }

@ -0,0 +1,11 @@
type: bulk
args:

- type: insert
  args:
    table: users
    objects:
    - name: Alyssa
      favorite_color: red
    - name: Ben
      favorite_color: blue

@ -0,0 +1,8 @@
type: bulk
args:

- type: run_sql
  args:
    sql: |
      DELETE FROM users;
      SELECT setval('users_id_seq', 1, FALSE);

@ -7,7 +7,7 @@ response:
      code: validation-failed
      path: $.selectionSet.author.args.where.name._ne
      message: |-
        field "_ne" not found in type: 'text_comparison_exp'
        field "_ne" not found in type: 'String_comparison_exp'
query:
  query: |
    query {

@ -7,7 +7,7 @@ response:
      code: validation-failed
      path: $.selectionSet.author.args.where.id._unexpected
      message: |-
        field "_unexpected" not found in type: 'integer_comparison_exp'
        field "_unexpected" not found in type: 'Int_comparison_exp'
query:
  query: |
    query {

server/tests-py/queries/graphql_query/enums/introspect.yaml (new file, 57 lines)

@ -0,0 +1,57 @@
description: Test introspecting enum types
url: /v1/graphql
status: 200
response:
  data:
    colors:
      name: colors_enum
      kind: ENUM
      enumValues:
      - name: blue
        description: '#0000FF'
      - name: green
        description: '#00FF00'
      - name: orange
        description: '#FFFF00'
      - name: purple
        description: '#FF00FF'
      - name: red
        description: '#FF0000'
      - name: yellow
        description: '#00FFFF'
    users:
      fields:
      - name: favorite_color
        type:
          ofType:
            name: colors_enum
      - name: id
        type:
          ofType:
            name: Int
      - name: name
        type:
          ofType:
            name: String
query:
  query: |
    {
      colors: __type(name: "colors_enum") {
        name
        kind
        enumValues {
          name
          description
        }
      }
      users: __type(name: "users") {
        fields {
          name
          type {
            ofType {
              name
            }
          }
        }
      }
    }

@ -0,0 +1,18 @@
description: Test querying a table that references an enum table
url: /v1/graphql
status: 200
response:
  data:
    users:
    - name: Alyssa
      favorite_color: red
    - name: Ben
      favorite_color: blue
query:
  query: |
    {
      users {
        name
        favorite_color
      }
    }

@ -0,0 +1,17 @@
description: Test querying a table that references an enum table and filtering on enum equality
url: /v1/graphql
status: 200
response:
  data:
    like_red:
    - name: Alyssa
    like_blue:
    - name: Ben
    like_green: []
query:
  query: |
    {
      like_red: users(where: { favorite_color: { _eq: red }}) { name }
      like_blue: users(where: { favorite_color: { _eq: blue }}) { name }
      like_green: users(where: { favorite_color: { _eq: green }}) { name }
    }

@ -0,0 +1,14 @@
description: Test validation of enum input values
url: /v1/graphql
status: 200
response:
  errors:
  - message: 'unexpected value "not_a_real_color" for enum: ''colors_enum'''
    extensions:
      code: validation-failed
      path: $.selectionSet.users.args.where.favorite_color._eq
query:
  query: |
    {
      users(where: { favorite_color: { _eq: not_a_real_color }}) { name }
    }

@ -0,0 +1,14 @@
description: Test enum input values cannot be string literals
url: /v1/graphql
status: 200
response:
  errors:
  - message: expecting an enum
    extensions:
      code: validation-failed
      path: $.selectionSet.users.args.where.favorite_color._eq
query:
  query: |
    {
      users(where: { favorite_color: { _eq: "not_a_real_color" }}) { name }
    }

@ -0,0 +1,21 @@
description: Test querying a table that references an enum table and filtering on enum equality via a variable
url: /v1/graphql
status: 200
response:
  data:
    like_1:
    - name: Alyssa
    like_2:
    - name: Ben
    like_3: []
query:
  query: |
    query ($color_1: colors_enum, $color_2: colors_enum, $color_3: colors_enum) {
      like_1: users(where: { favorite_color: { _eq: $color_1 }}) { name }
      like_2: users(where: { favorite_color: { _eq: $color_2 }}) { name }
      like_3: users(where: { favorite_color: { _eq: $color_3 }}) { name }
    }
  variables:
    color_1: red
    color_2: blue
    color_3: green

@ -0,0 +1,16 @@
description: Test validation of enum values in variables
url: /v1/graphql
status: 200
response:
  errors:
  - message: 'unexpected value "not_a_real_color" for enum: ''colors_enum'''
    extensions:
      code: validation-failed
      path: $.variableValues.color
query:
  query: |
    query ($color: colors_enum) {
      users(where: { favorite_color: { _eq: $color }}) { name }
    }
  variables:
    color: not_a_real_color

server/tests-py/queries/graphql_query/enums/setup.yaml (new file, 31 lines)

@ -0,0 +1,31 @@
type: bulk
args:

- type: run_sql
  args:
    sql: |
      CREATE TABLE colors
      ( value text PRIMARY KEY
      , comment text );
      INSERT INTO colors (value, comment) VALUES
        ('red', '#FF0000'),
        ('green', '#00FF00'),
        ('blue', '#0000FF'),
        ('orange', '#FFFF00'),
        ('yellow', '#00FFFF'),
        ('purple', '#FF00FF');

      CREATE TABLE users
      ( id serial PRIMARY KEY
      , name text NOT NULL
      , favorite_color text NOT NULL REFERENCES colors );
      INSERT INTO users (name, favorite_color) VALUES
        ('Alyssa', 'red'),
        ('Ben', 'blue');

- type: track_table
  args:
    table: colors
    is_enum: true
- type: track_table
  args: users

@ -0,0 +1,8 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      DROP TABLE users;
      DROP TABLE colors;
    cascade: true

@ -7,7 +7,7 @@ setup:
    args:
      sql: |
        create table author(
          id serial primary key,
          id serial primary key,
          name text unique
        );
        create table article(

@ -24,7 +24,7 @@ setup:
    args:
      schema: public
      name: article


#Object relationship
- type: create_object_relationship
  args:

@ -32,7 +32,7 @@ setup:
    name: author
    using:
      foreign_key_constraint_on: author_id


#Array relationship
- type: create_array_relationship
  args:

@ -53,7 +53,7 @@ inconsistent_objects:
        column: author_id
        table: article
    name: articles
    comment:
    comment:
  table: author
  reason: table "article" does not exist
  type: array_relation

@ -61,15 +61,15 @@ inconsistent_objects:
    using:
      foreign_key_constraint_on: author_id
    name: author
    comment:
    comment:
  table: article
  reason: table "article" does not exist
  type: object_relation
- definition: article
  reason: 'no such table/view exists in postgres : "article"'
  reason: 'no such table/view exists in postgres: "article"'
  type: table

# Teardown
# Teardown
teardown:
  type: bulk
  args:

@ -77,4 +77,3 @@ teardown:
    args:
      sql: |
        drop table author

@ -5,6 +5,7 @@ response:
  remote_schemas: []
  tables:
  - table: author
    is_enum: false
    object_relationships: []
    array_relationships:
    - using:

@ -19,6 +20,7 @@ response:
    delete_permissions: []
    event_triggers: []
  - table: article
    is_enum: false
    object_relationships:
    - using:
        foreign_key_constraint_on: author_id

server/tests-py/queries/v1/set_table_is_enum/add_and_remove.yaml (new file, 128 lines)

@ -0,0 +1,128 @@
- description: Mark a valid enum table as an enum
  url: /v1/query
  status: 200
  response:
    message: success
  query:
    type: set_table_is_enum
    args:
      table: weekdays
      is_enum: true

- description: Check that marking a table as an enum changed the schema
  url: /v1/graphql
  status: 200
  response:
    data:
      weekdays:
        name: weekdays_enum
        kind: ENUM
        enumValues:
        - name: friday
          description: null
        - name: monday
          description: null
        - name: saturday
          description: null
        - name: sunday
          description: null
        - name: thursday
          description: null
        - name: tuesday
          description: null
        - name: wednesday
          description: null
      employees:
        fields:
        - name: favorite_color
          type:
            ofType:
              name: colors_enum
        - name: gets_paid_on
          type:
            ofType:
              name: weekdays_enum
        - name: id
          type:
            ofType:
              name: Int
        - name: name
          type:
            ofType:
              name: String
  query:
    query: |
      {
        weekdays: __type(name: "weekdays_enum") {
          name
          kind
          enumValues {
            name
            description
          }
        }
        employees: __type(name: "employees") {
          fields {
            name
            type {
              ofType {
                name
              }
            }
          }
        }
      }

- description: Mark an existing enum table as not an enum
  url: /v1/query
  status: 200
  response:
    message: success
  query:
    type: set_table_is_enum
    args:
      table: colors
      is_enum: false

- description: Check that marking a table as not an enum changed the schema
  url: /v1/graphql
  status: 200
  response:
    data:
      colors: null
      employees:
        fields:
        - name: favorite_color
          type:
            ofType:
              name: String
        - name: gets_paid_on
          type:
            ofType:
              name: weekdays_enum
        - name: id
          type:
            ofType:
              name: Int
        - name: name
          type:
            ofType:
              name: String
  query:
    query: |
      {
        colors: __type(name: "colors_enum") {
          name
          kind
        }
        employees: __type(name: "employees") {
          fields {
            name
            type {
              ofType {
                name
              }
            }
          }
        }
      }

@ -0,0 +1,15 @@
description: Attempts to mark a non-enum table as an enum are rejected
url: /v1/query
status: 400
response:
  code: constraint-violation
  error: |
    the table "employees" cannot be used as an enum for the following reasons:
      • the table’s primary key ("id") must have type "text", not type "integer"
      • the table must have exactly one primary key and optionally one comment column, not 4 columns (favorite_color, gets_paid_on, id, name)
  path: $.args
query:
  type: set_table_is_enum
  args:
    table: employees
    is_enum: true
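
A hedged aside, not part of the diff: the error above spells out the shape an
enum table must have. A minimal sketch, using a hypothetical priorities table,
of a definition that would pass set_table_is_enum's checks:

    -- Illustrative only: a text primary key plus at most one optional
    -- comment column is the required shape.
    CREATE TABLE priorities
    ( value text PRIMARY KEY   -- becomes the enum value's name
    , comment text );          -- optional; becomes its description
    INSERT INTO priorities (value, comment) VALUES
      ('low', 'can wait'),
      ('high', 'do it now');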

server/tests-py/queries/v1/set_table_is_enum/setup.yaml (new file, 45 lines)

@ -0,0 +1,45 @@
type: bulk
args:

- type: run_sql
  args:
    sql: |
      CREATE TABLE colors
      ( value text PRIMARY KEY
      , comment text );
      INSERT INTO colors (value, comment) VALUES
        ('red', '#FF0000'),
        ('green', '#00FF00'),
        ('blue', '#0000FF'),
        ('orange', '#FFFF00'),
        ('yellow', '#00FFFF'),
        ('purple', '#FF00FF');

      CREATE TABLE weekdays
      ( value text PRIMARY KEY );
      INSERT INTO weekdays (value) VALUES
        ('sunday'),
        ('monday'),
        ('tuesday'),
        ('wednesday'),
        ('thursday'),
        ('friday'),
        ('saturday');

      CREATE TABLE employees
      ( id serial PRIMARY KEY
      , name text NOT NULL
      , favorite_color text NOT NULL REFERENCES colors
      , gets_paid_on text NOT NULL REFERENCES weekdays );
      INSERT INTO employees (name, favorite_color, gets_paid_on) VALUES
        ('Alyssa', 'red', 'monday'),
        ('Ben', 'blue', 'friday');

- type: track_table
  args:
    table: colors
    is_enum: true
- type: track_table
  args: weekdays
- type: track_table
  args: employees

@ -0,0 +1,9 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      DROP TABLE employees;
      DROP TABLE weekdays;
      DROP TABLE colors;
    cascade: true

@ -385,3 +385,24 @@ class TestGraphqlDeletePermissions(DefaultTestMutations):
    @classmethod
    def dir(cls):
        return "queries/graphql_mutation/delete/permissions"

@pytest.mark.parametrize('transport', ['http', 'websocket'])
class TestGraphQLMutateEnums(DefaultTestMutations):
    @classmethod
    def dir(cls):
        return 'queries/graphql_mutation/enums'

    def test_insert_enum_field(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/insert_enum_field.yaml', transport)

    def test_insert_enum_field_bad_value(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/insert_enum_field_bad_value.yaml', transport)

    def test_update_enum_field(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/update_enum_field.yaml', transport)

    def test_update_where_enum_field(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/update_where_enum_field.yaml', transport)

    def test_delete_where_enum_field(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/delete_where_enum_field.yaml', transport)
|
||||
@classmethod
|
||||
def dir(cls):
|
||||
return 'queries/graphql_query/functions'
|
||||
|
||||
@pytest.mark.parametrize('transport', ['http', 'websocket'])
|
||||
class TestGraphQLQueryEnums(DefaultTestSelectQueries):
|
||||
@classmethod
|
||||
def dir(cls):
|
||||
return 'queries/graphql_query/enums'
|
||||
|
||||
def test_introspect(self, hge_ctx, transport):
|
||||
check_query_f(hge_ctx, self.dir() + '/introspect.yaml', transport)
|
||||
|
||||
def test_select_enum_field(self, hge_ctx, transport):
|
||||
check_query_f(hge_ctx, self.dir() + '/select_enum_field.yaml', transport)
|
||||
|
||||
def test_select_where_enum_eq(self, hge_ctx, transport):
|
||||
check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq.yaml', transport)
|
||||
|
||||
def test_select_where_enum_eq_bad_value(self, hge_ctx, transport):
|
||||
check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_bad_value.yaml', transport)
|
||||
|
||||
def test_select_where_enum_eq_string(self, hge_ctx, transport):
|
||||
check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_string.yaml', transport)
|
||||
|
||||
def test_select_where_enum_eq_variable(self, hge_ctx, transport):
|
||||
check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_variable.yaml', transport)
|
||||
|
||||
def test_select_where_enum_eq_variable_bad_value(self, hge_ctx, transport):
|
||||
check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_variable_bad_value.yaml', transport)
|
||||
|

@ -1,6 +1,7 @@
import pytest
import yaml
import json
import jsondiff

from validate import json_ordered

@ -624,3 +624,14 @@ class TestNonEmptyText:
    @classmethod
    def dir(cls):
        return "queries/v1/non_empty_text"

class TestSetTableIsEnum(DefaultTestQueries):
    @classmethod
    def dir(cls):
        return 'queries/v1/set_table_is_enum'

    def test_add_and_remove(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/add_and_remove.yaml')

    def test_add_invalid(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/add_invalid.yaml')