2021-05-26 19:19:26 +03:00
|
|
|
{-# LANGUAGE UndecidableInstances #-}
|
2019-10-21 19:01:05 +03:00
|
|
|
module Main (main) where
|
|
|
|
|
|
|
|
import Hasura.Prelude
|
|
|
|
|
2021-05-20 13:03:02 +03:00
|
|
|
import qualified Data.Aeson as A
|
|
|
|
import qualified Data.ByteString.Lazy.Char8 as BL
|
|
|
|
import qualified Data.Environment as Env
|
2021-05-24 23:12:53 +03:00
|
|
|
import qualified Data.NonNegativeIntSpec as NonNegetiveIntSpec
|
|
|
|
import qualified Data.Parser.CacheControlSpec as CacheControlParser
|
|
|
|
import qualified Data.Parser.JSONPathSpec as JsonPath
|
|
|
|
import qualified Data.TimeSpec as TimeSpec
|
2021-05-20 13:03:02 +03:00
|
|
|
import qualified Database.PG.Query as Q
|
|
|
|
import qualified Network.HTTP.Client as HTTP
|
|
|
|
import qualified Network.HTTP.Client.TLS as HTTP
|
|
|
|
import qualified Test.Hspec.Runner as Hspec
|
|
|
|
|
2019-11-27 01:49:42 +03:00
|
|
|
import Control.Concurrent.MVar
|
2021-05-20 13:03:02 +03:00
|
|
|
import Control.Natural ((:~>) (..))
|
|
|
|
import Data.Time.Clock (getCurrentTime)
|
2020-12-28 15:56:00 +03:00
|
|
|
import Data.URL.Template
|
2019-10-21 19:01:05 +03:00
|
|
|
import Options.Applicative
|
2021-05-20 13:03:02 +03:00
|
|
|
import System.Environment (getEnvironment)
|
|
|
|
import System.Exit (exitFailure)
|
2019-10-21 19:01:05 +03:00
|
|
|
import Test.Hspec
|
2019-11-18 21:45:54 +03:00
|
|
|
|
2021-05-20 13:03:02 +03:00
|
|
|
import qualified Hasura.EventingSpec as EventingSpec
|
|
|
|
import qualified Hasura.GraphQL.Parser.DirectivesTest as GraphQLDirectivesSpec
|
2021-07-30 14:33:06 +03:00
|
|
|
import qualified Hasura.GraphQL.RemoteServerSpec as RemoteServerSpec
|
2021-05-24 23:12:53 +03:00
|
|
|
import qualified Hasura.GraphQL.Schema.RemoteTest as GraphRemoteSchemaSpec
|
2021-05-20 13:03:02 +03:00
|
|
|
import qualified Hasura.IncrementalSpec as IncrementalSpec
|
|
|
|
import qualified Hasura.RQL.Types.EndpointSpec as EndpointSpec
|
|
|
|
import qualified Hasura.SQL.WKTSpec as WKTSpec
|
|
|
|
import qualified Hasura.Server.AuthSpec as AuthSpec
|
|
|
|
import qualified Hasura.Server.MigrateSpec as MigrateSpec
|
|
|
|
import qualified Hasura.Server.TelemetrySpec as TelemetrySpec
|
2020-12-28 15:56:00 +03:00
|
|
|
|
Clean metadata arguments
## Description
Thanks to #1664, the Metadata API types no longer require a `ToJSON` instance. This PR follows up with a cleanup of the types of the arguments to the metadata API:
- whenever possible, it moves those argument types to where they're used (RQL.DDL.*)
- it removes all unrequired instances (mostly `ToJSON`)
This PR does not attempt to do it for _all_ such argument types. For some of the metadata operations, the type used to describe the argument to the API and used to represent the value in the metadata are one and the same (like for `CreateEndpoint`). Sometimes, the two types are intertwined in complex ways (`RemoteRelationship` and `RemoteRelationshipDef`). In the spirit of only doing uncontroversial cleaning work, this PR only moves types that are not used outside of RQL.DDL.
Furthermore, this is a small step towards separating the different types all jumbled together in RQL.Types.
## Notes
This PR also improves several `FromJSON` instances to make use of `withObject`, and to use a human readable string instead of a type name in error messages whenever possible. For instance:
- before: `expected Object for Object, but encountered X`
after: `expected Object for add computed field, but encountered X`
- before: `Expecting an object for update query`
after: `expected Object for update query, but encountered X`
This PR also renames `CreateFunctionPermission` to `FunctionPermissionArgument`, to remove the quite surprising `type DropFunctionPermission = CreateFunctionPermission`.
This PR also deletes some dead code, mostly in RQL.DML.
This PR also moves a PG-specific source resolving function from DDL.Schema.Source to the only place where it is used: App.hs.
https://github.com/hasura/graphql-engine-mono/pull/1844
GitOrigin-RevId: a594521194bb7fe6a111b02a9e099896f9fed59c
2021-07-27 13:41:42 +03:00
|
|
|
import Hasura.App (PGMetadataStorageAppT (..),
|
|
|
|
mkPgSourceResolver)
|
2021-05-26 19:19:26 +03:00
|
|
|
import Hasura.Metadata.Class
|
2020-12-28 15:56:00 +03:00
|
|
|
import Hasura.RQL.DDL.Schema.Cache
|
|
|
|
import Hasura.RQL.DDL.Schema.Cache.Common
|
|
|
|
import Hasura.RQL.Types
|
|
|
|
import Hasura.Server.Init
|
2019-11-20 21:21:30 +03:00
|
|
|
import Hasura.Server.Migrate
|
2021-02-18 19:46:14 +03:00
|
|
|
import Hasura.Server.Types
|
2020-01-23 00:55:55 +03:00
|
|
|
import Hasura.Server.Version
|
2021-06-29 19:39:57 +03:00
|
|
|
import Hasura.Server.Version.TH
|
2020-12-28 15:56:00 +03:00
|
|
|
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | Top-level selection of which test suites to execute.
data TestSuites
  = AllSuites !(Maybe URLTemplate)
    -- ^ Run all test suites. It probably doesn't make sense to be able to specify
    -- additional hspec args here.
  | SingleSuite ![String] !TestSuite
    -- ^ Args to pass through to hspec (as if from 'getArgs'), and the specific
    -- suite to run.
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | A single runnable suite.
data TestSuite
  = UnitSuite
    -- ^ Run only the unit tests.
  | PostgresSuite !(Maybe URLTemplate)
    -- ^ Run only the Postgres integration tests, with an optional database
    -- URL template (otherwise taken from the environment).
|
2019-10-21 19:01:05 +03:00
|
|
|
|
2019-12-14 09:47:38 +03:00
|
|
|
-- | Entry point: parse the command line, then dispatch to the selected
-- suite(s) and hand the resulting specs to hspec.
main :: IO ()
main = withVersion $$(getVersionFromEnvironment) $ do
  suites <- parseArgs
  case suites of
    AllSuites pgConnOptions -> do
      pgSpecs <- buildPostgresSpecs pgConnOptions
      runHspec [] (unitSpecs *> pgSpecs)
    SingleSuite hspecArgs suite -> do
      specs <- case suite of
        UnitSuite                   -> pure unitSpecs
        PostgresSuite pgConnOptions -> buildPostgresSpecs pgConnOptions
      runHspec hspecArgs specs
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | All specs that run without external services.
--
-- Kept in alphabetical order by module name so new entries have an obvious
-- home. (Previously @Hasura.GraphQL.RemoteServer@ had been appended out of
-- order, after @Hasura.RQL.Types.Endpoint@.)
unitSpecs :: Spec
unitSpecs = do
  describe "Data.NonNegativeInt" NonNegetiveIntSpec.spec
  describe "Data.Parser.CacheControl" CacheControlParser.spec
  describe "Data.Parser.JSONPath" JsonPath.spec
  describe "Data.Time" TimeSpec.spec
  describe "Hasura.Eventing" EventingSpec.spec
  describe "Hasura.GraphQL.Parser.Directives" GraphQLDirectivesSpec.spec
  describe "Hasura.GraphQL.RemoteServer" RemoteServerSpec.spec
  describe "Hasura.GraphQL.Schema.Remote" GraphRemoteSchemaSpec.spec
  describe "Hasura.Incremental" IncrementalSpec.spec
  describe "Hasura.RQL.Types.Endpoint" EndpointSpec.spec
  describe "Hasura.SQL.WKT" WKTSpec.spec
  describe "Hasura.Server.Auth" AuthSpec.spec
  describe "Hasura.Server.Telemetry" TelemetrySpec.spec
|
2019-11-18 21:45:54 +03:00
|
|
|
|
2020-12-28 15:56:00 +03:00
|
|
|
-- | Build the Postgres integration specs.
--
-- Resolves a database URL (from the @--database-url@ option or the
-- environment), opens a single-connection pool against it, migrates the
-- catalog, builds a schema cache, and wires everything into the
-- "Hasura.Server.Migrate" spec via a 'beforeAll' setup action.
buildPostgresSpecs :: HasVersion => Maybe URLTemplate -> IO Spec
buildPostgresSpecs maybeUrlTemplate = do
  env <- getEnvironment
  let envMap = Env.mkEnvironment env

  -- Pick the URL template from the CLI option, falling back to the
  -- HASURA database-url environment variable; bail out if neither is set.
  pgUrlTemplate <- flip onLeft printErrExit $ runWithEnv env $ do
    let envVar = fst databaseUrlEnv
    mTemplate <- withEnv maybeUrlTemplate envVar
    onNothing mTemplate $ throwError $
      "Expected: --database-url or " <> envVar

  pgUrlText <- flip onLeft printErrExit $ renderURLTemplate envMap pgUrlTemplate
  let pgConnInfo = Q.ConnInfo 1 $ Q.CDDatabaseURI $ txtToBs pgUrlText
      urlConf = UrlValue $ InputWebhook pgUrlTemplate
      sourceConnInfo =
        PostgresSourceConnInfo urlConf (Just setPostgresPoolSettings) True Q.ReadCommitted Nothing
      sourceConfig = PostgresConnConfiguration sourceConnInfo Nothing

  -- A single connection is enough for the test suite.
  pgPool <- Q.initPGPool pgConnInfo Q.defaultConnParams { Q.cpConns = 1 } print
  let pgContext = mkPGExecCtx Q.Serializable pgPool

      -- Setup action run once (via 'beforeAll') before the migrate specs:
      -- migrates the catalog, builds the schema cache, and returns a natural
      -- transformation the specs use to run their monadic actions in IO.
      setupCacheRef = do
        httpManager <- HTTP.newManager HTTP.tlsManagerSettings
        let sqlGenCtx = SQLGenCtx False False
            maintenanceMode = MaintenanceModeDisabled
            serverConfigCtx =
              ServerConfigCtx FunctionPermissionsInferred RemoteSchemaPermsDisabled sqlGenCtx maintenanceMode mempty
            cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) serverConfigCtx
            pgLogger = print

            -- Peel off the whole monad stack down to IO; the two 'onLeft's
            -- handle the two Either layers (CacheBuild and MetadataStorageT).
            run :: MetadataStorageT (PGMetadataStorageAppT CacheBuild) a -> IO a
            run =
              runMetadataStorageT
              >>> flip runPGMetadataStorageAppT (pgPool, pgLogger)
              >>> runCacheBuild cacheBuildParams
              >>> runExceptT
              >=> flip onLeft printErrJExit
              >=> flip onLeft printErrJExit

        (metadata, schemaCache) <- run $ do
          metadata <- snd <$> (liftEitherM . runExceptT . runLazyTx pgContext Q.ReadWrite)
            (migrateCatalog (Just sourceConfig) maintenanceMode =<< liftIO getCurrentTime)
          -- TODO: Decide if this should be passed in via reader
          tlsAllowlist <- newEmptyTlsAllowlist
          schemaCache <- lift $ lift $ buildRebuildableSchemaCache envMap metadata tlsAllowlist
          pure (metadata, schemaCache)

        cacheRef <- newMVar schemaCache
        pure $ NT (run . flip MigrateSpec.runCacheRefT cacheRef . fmap fst . runMetadataT metadata)

  pure $ beforeAll setupCacheRef $
    describe "Hasura.Server.Migrate" $ MigrateSpec.spec sourceConfig pgContext pgConnInfo
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | Parse the command line into a 'TestSuites' value.
--
-- With no subcommand, all suites run ('AllSuites'); the @unit@ and
-- @postgres@ subcommands select a single suite.
parseArgs :: IO TestSuites
parseArgs = execParser $ info (helper <*> (parseNoCommand <|> parseSubCommand)) $
  fullDesc <> header "Hasura GraphQL Engine test suite"
  where
    parseDbUrlTemplate =
      parseDatabaseUrl <|> (fmap rawConnDetailsToUrl <$> parseRawConnDetails)

    parseNoCommand = AllSuites <$> parseDbUrlTemplate

    parseSubCommand = SingleSuite <$> parseHspecPassThroughArgs <*> subCmd
      where
        subCmd = subparser $ mconcat
          [ command "unit" $ info (pure UnitSuite) $
              progDesc "Only run unit tests"
          , command "postgres" $ info (helper <*> (PostgresSuite <$> parseDbUrlTemplate)) $
              progDesc "Only run Postgres integration tests"
          ]

        -- Add additional arguments and tweak as needed:
        hspecArgs = ["match", "skip"]

        -- parse to a list of arguments as they'd appear from 'getArgs':
        parseHspecPassThroughArgs :: Parser [String]
        parseHspecPassThroughArgs = fmap concat $ for hspecArgs $ \flagName ->
          fmap (maybe [] (\value -> ["--" <> flagName, value])) $ optional $
            strOption
              (  long flagName
              <> metavar "<PATTERN>"
              <> help "Flag passed through to hspec (see hspec docs)."
              )
|
|
|
|
|
|
|
|
|
|
|
|
-- | Run a spec tree through hspec with the given pass-through arguments,
-- exiting with the appropriate status via 'Hspec.evaluateSummary'.
runHspec :: [String] -> Spec -> IO ()
runHspec hspecArgs spec = do
  config  <- Hspec.readConfig Hspec.defaultConfig hspecArgs
  summary <- Hspec.runSpec spec config
  Hspec.evaluateSummary summary
|
|
|
|
|
|
|
|
-- | Print a plain error message to stdout and terminate with a failure
-- exit code.
printErrExit :: String -> IO a
printErrExit message = do
  putStrLn message
  exitFailure
|
2019-11-20 21:21:30 +03:00
|
|
|
|
|
|
|
-- | Encode an error value as JSON, print it to stdout, and terminate with
-- a failure exit code.
printErrJExit :: (A.ToJSON a) => a -> IO b
printErrJExit err = do
  BL.putStrLn (A.encode err)
  exitFailure
|