2019-10-21 19:01:05 +03:00
|
|
|
module Main (main) where
|
|
|
|
|
|
|
|
import Hasura.Prelude
|
|
|
|
|
2019-11-27 01:49:42 +03:00
|
|
|
import Control.Concurrent.MVar
|
2020-12-28 15:56:00 +03:00
|
|
|
import Control.Natural ((:~>) (..))
|
|
|
|
import Data.Time.Clock (getCurrentTime)
|
|
|
|
import Data.URL.Template
|
2019-10-21 19:01:05 +03:00
|
|
|
import Options.Applicative
|
2020-12-28 15:56:00 +03:00
|
|
|
import System.Environment (getEnvironment)
|
|
|
|
import System.Exit (exitFailure)
|
2019-10-21 19:01:05 +03:00
|
|
|
import Test.Hspec
|
2019-11-18 21:45:54 +03:00
|
|
|
|
2020-12-28 15:56:00 +03:00
|
|
|
import qualified Data.Aeson as A
|
|
|
|
import qualified Data.ByteString.Lazy.Char8 as BL
|
|
|
|
import qualified Data.Environment as Env
|
|
|
|
import qualified Database.PG.Query as Q
|
|
|
|
import qualified Network.HTTP.Client as HTTP
|
|
|
|
import qualified Network.HTTP.Client.TLS as HTTP
|
|
|
|
import qualified Test.Hspec.Runner as Hspec
|
|
|
|
|
|
|
|
import Hasura.RQL.DDL.Schema.Cache
|
|
|
|
import Hasura.RQL.DDL.Schema.Cache.Common
|
|
|
|
import Hasura.RQL.DDL.Schema.Source
|
|
|
|
import Hasura.RQL.Types
|
|
|
|
import Hasura.Server.Init
|
2019-11-20 21:21:30 +03:00
|
|
|
import Hasura.Server.Migrate
|
2021-02-18 19:46:14 +03:00
|
|
|
import Hasura.Server.Types
|
2020-01-23 00:55:55 +03:00
|
|
|
import Hasura.Server.Version
|
2020-12-28 15:56:00 +03:00
|
|
|
|
|
|
|
import qualified Data.NonNegativeIntSpec as NonNegetiveIntSpec
|
|
|
|
import qualified Data.Parser.CacheControlSpec as CacheControlParser
|
|
|
|
import qualified Data.Parser.JSONPathSpec as JsonPath
|
|
|
|
import qualified Data.Parser.URLTemplate as URLTemplate
|
|
|
|
import qualified Data.TimeSpec as TimeSpec
|
|
|
|
import qualified Hasura.IncrementalSpec as IncrementalSpec
|
2020-03-26 14:52:20 +03:00
|
|
|
-- import qualified Hasura.RQL.MetadataSpec as MetadataSpec
|
2020-12-28 15:56:00 +03:00
|
|
|
import qualified Hasura.CacheBoundedSpec as CacheBoundedSpec
|
2021-01-29 04:02:34 +03:00
|
|
|
import qualified Hasura.RQL.Types.EndpointSpec as EndpointSpec
|
2021-03-26 19:59:16 +03:00
|
|
|
import qualified Hasura.SQL.WKTSpec as WKTSpec
|
2020-12-28 15:56:00 +03:00
|
|
|
import qualified Hasura.Server.AuthSpec as AuthSpec
|
|
|
|
import qualified Hasura.Server.MigrateSpec as MigrateSpec
|
|
|
|
import qualified Hasura.Server.TelemetrySpec as TelemetrySpec
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | Top-level selection of which test suites to run, as decided by the
-- command-line arguments.
data TestSuites
  = AllSuites !(Maybe URLTemplate)
  -- ^ Run all test suites. It probably doesn't make sense to be able to specify
  -- additional hspec args here.
  | SingleSuite ![String] !TestSuite
  -- ^ Args to pass through to hspec (as if from 'getArgs'), and the specific suite to run.
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | One individually-runnable test suite.
data TestSuite
  = UnitSuite
  -- ^ Pure unit tests; no external services required.
  | PostgresSuite !(Maybe URLTemplate)
  -- ^ Integration tests that need a Postgres connection (URL template optional;
  -- may instead come from the environment).
|
2019-10-21 19:01:05 +03:00
|
|
|
|
2019-12-14 09:47:38 +03:00
|
|
|
-- | Entry point: parse the command line, then dispatch to either all suites or
-- a single selected suite. The version is resolved at compile time via the
-- 'getVersionFromEnvironment' splice.
main :: IO ()
main = withVersion $$(getVersionFromEnvironment) $ do
  suites <- parseArgs
  case suites of
    AllSuites pgConnOptions -> do
      -- Build the Postgres specs eagerly so connection problems surface before
      -- any tests run, then run unit and Postgres specs as one hspec tree.
      postgresSpecs <- buildPostgresSpecs pgConnOptions
      runHspec [] (unitSpecs *> postgresSpecs)
    SingleSuite hspecArgs suite ->
      runHspec hspecArgs =<< case suite of
        UnitSuite                   -> pure unitSpecs
        PostgresSuite pgConnOptions -> buildPostgresSpecs pgConnOptions
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | The pure unit-test suite: one 'describe' group per spec module.
-- These require no database or network access.
unitSpecs :: Spec
unitSpecs = do
  describe "Data.Parser.CacheControl" CacheControlParser.spec
  describe "Data.Parser.URLTemplate" URLTemplate.spec
  describe "Data.Parser.JSONPath" JsonPath.spec
  describe "Hasura.Incremental" IncrementalSpec.spec
  -- describe "Hasura.RQL.Metadata" MetadataSpec.spec -- Commenting until optimizing the test in CI
  describe "Data.Time" TimeSpec.spec
  describe "Data.NonNegativeInt" NonNegetiveIntSpec.spec
  describe "Hasura.Server.Telemetry" TelemetrySpec.spec
  describe "Hasura.Server.Auth" AuthSpec.spec
  describe "Hasura.Cache.Bounded" CacheBoundedSpec.spec
  describe "Hasura.RQL.Types.Endpoint" EndpointSpec.spec
  describe "Hasura.SQL.WKT" WKTSpec.spec
|
2019-11-18 21:45:54 +03:00
|
|
|
|
2020-12-28 15:56:00 +03:00
|
|
|
-- | Build the Postgres integration spec tree.
--
-- Resolves the database URL (from the optional CLI template or the
-- @HASURA_GRAPHQL_DATABASE_URL@ environment variable), sets up a single-connection
-- pool, and wires a 'beforeAll' hook that migrates the catalog and builds the
-- schema cache before the migration specs run. Any setup failure prints an
-- error and exits.
buildPostgresSpecs :: HasVersion => Maybe URLTemplate -> IO Spec
buildPostgresSpecs maybeUrlTemplate = do
  env <- getEnvironment
  let envMap = Env.mkEnvironment env

  -- Prefer the CLI-provided template; fall back to the database-url env var.
  pgUrlTemplate <- flip onLeft printErrExit $ runWithEnv env $ do
    let envVar = fst databaseUrlEnv
    maybeV <- withEnv maybeUrlTemplate envVar
    onNothing maybeV $ throwError $
      "Expected: --database-url or " <> envVar

  pgUrlText <- flip onLeft printErrExit $ renderURLTemplate envMap pgUrlTemplate
  let pgConnInfo = Q.ConnInfo 1 $ Q.CDDatabaseURI $ txtToBs pgUrlText
      urlConf = UrlValue $ InputWebhook pgUrlTemplate
      sourceConnInfo = PostgresSourceConnInfo urlConf (Just setPostgresPoolSettings)
      sourceConfig = PostgresConnConfiguration sourceConnInfo Nothing

  -- One connection is enough for the serialized test workload.
  pgPool <- Q.initPGPool pgConnInfo Q.defaultConnParams { Q.cpConns = 1 } print
  let pgContext = mkPGExecCtx Q.Serializable pgPool

      -- Runs once before the suite: migrate the catalog, build the schema
      -- cache, and hand back a natural transformation the specs use to run
      -- cache-aware actions in IO.
      setupCacheRef = do
        httpManager <- HTTP.newManager HTTP.tlsManagerSettings
        let sqlGenCtx = SQLGenCtx False
            maintenanceMode = MaintenanceModeDisabled
            serverConfigCtx =
              ServerConfigCtx FunctionPermissionsInferred RemoteSchemaPermsDisabled sqlGenCtx maintenanceMode mempty
            cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) serverConfigCtx

            -- Run a CacheBuild action in IO, exiting with a JSON-encoded error
            -- on failure.
            run :: CacheBuild a -> IO a
            run =
              runCacheBuild cacheBuildParams
                >>> runExceptT
                >=> flip onLeft printErrJExit

        (metadata, schemaCache) <- run do
          metadata <- snd <$> (liftEitherM . runExceptT . runLazyTx pgContext Q.ReadWrite)
            (migrateCatalog (Just sourceConfig) maintenanceMode =<< liftIO getCurrentTime)
          schemaCache <- buildRebuildableSchemaCache envMap metadata
          pure (metadata, schemaCache)

        cacheRef <- newMVar schemaCache
        pure $ NT (run . flip MigrateSpec.runCacheRefT cacheRef . fmap fst . runMetadataT metadata)

  pure $ beforeAll setupCacheRef $
    describe "Hasura.Server.Migrate" $ MigrateSpec.spec sourceConfig pgContext pgConnInfo
|
2019-11-18 21:45:54 +03:00
|
|
|
|
|
|
|
-- | Parse command-line arguments into a 'TestSuites' value.
--
-- With no subcommand, all suites run ('AllSuites'); the @unit@ and @postgres@
-- subcommands select a single suite, with a small set of hspec flags passed
-- through verbatim.
parseArgs :: IO TestSuites
parseArgs = execParser $ info (helper <*> (parseNoCommand <|> parseSubCommand)) $
  fullDesc <> header "Hasura GraphQL Engine test suite"
  where
    -- Accept either a full database URL or raw connection details rendered
    -- into a URL template.
    parseDbUrlTemplate =
      parseDatabaseUrl <|> (fmap rawConnDetailsToUrl <$> parseRawConnDetails)

    parseNoCommand = AllSuites <$> parseDbUrlTemplate

    parseSubCommand = SingleSuite <$> parseHspecPassThroughArgs <*> subCmd
      where
        subCmd = subparser $ mconcat
          [ command "unit" $ info (pure UnitSuite) $
              progDesc "Only run unit tests"
          , command "postgres" $ info (helper <*> (PostgresSuite <$> parseDbUrlTemplate)) $
              progDesc "Only run Postgres integration tests"
          ]

        -- Add additional arguments and tweak as needed:
        hspecArgs = ["match", "skip"]

        -- parse to a list of arguments as they'd appear from 'getArgs':
        parseHspecPassThroughArgs :: Parser [String]
        parseHspecPassThroughArgs = fmap concat $ for hspecArgs $ \nm ->
          fmap (maybe [] (\a -> ["--" <> nm, a])) $ optional $
            strOption
              ( long nm <>
                metavar "<PATTERN>" <>
                help "Flag passed through to hspec (see hspec docs)." )
|
|
|
|
|
|
|
|
|
|
|
|
-- | Run a spec tree under hspec with the given pass-through arguments,
-- exiting non-zero if any test fails.
runHspec :: [String] -> Spec -> IO ()
runHspec hspecArgs m = do
  config <- Hspec.readConfig Hspec.defaultConfig hspecArgs
  Hspec.runSpec m config >>= Hspec.evaluateSummary
|
|
|
|
|
|
|
|
-- | Print a plain-text error message to stdout and terminate with a failure
-- exit code.
printErrExit :: String -> IO a
printErrExit message = do
  putStrLn message
  exitFailure
|
2019-11-20 21:21:30 +03:00
|
|
|
|
|
|
|
-- | JSON-encode an error value, print it to stdout, and terminate with a
-- failure exit code.
printErrJExit :: (A.ToJSON a) => a -> IO b
printErrJExit errValue = do
  BL.putStrLn (A.encode errValue)
  exitFailure
|