-- graphql-engine/server/src-test/Main.hs

{-# LANGUAGE UndecidableInstances #-}
module Main (main) where
import Control.Concurrent.MVar
import Control.Natural ((:~>) (..))
import Data.Aeson qualified as A
import Data.ByteString.Lazy.Char8 qualified as BL
import Data.Environment qualified as Env
import Data.NonNegativeIntSpec qualified as NonNegativeIntSpec
import Data.Parser.CacheControlSpec qualified as CacheControlParser
import Data.Parser.JSONPathSpec qualified as JsonPath
import Data.Time.Clock (getCurrentTime)
import Data.TimeSpec qualified as TimeSpec
import Data.URL.Template
import Database.MSSQL.TransactionSpec qualified as TransactionSpec
import Database.PG.Query qualified as Q
import Hasura.App
  ( PGMetadataStorageAppT (..),
    mkPgSourceResolver,
  )
import Hasura.EventingSpec qualified as EventingSpec
import Hasura.GraphQL.Parser.DirectivesTest qualified as GraphQLDirectivesSpec
import Hasura.GraphQL.RemoteServerSpec qualified as RemoteServerSpec
import Hasura.GraphQL.Schema.RemoteTest qualified as GraphRemoteSchemaSpec
import Hasura.IncrementalSpec qualified as IncrementalSpec
import Hasura.Metadata.Class
import Hasura.Prelude
import Hasura.RQL.DDL.Schema.Cache
import Hasura.RQL.DDL.Schema.Cache.Common
import Hasura.RQL.PermissionSpec qualified as PermSpec
import Hasura.RQL.RequestTransformSpec qualified as RequestTransformSpec
import Hasura.RQL.Types
import Hasura.RQL.Types.CommonSpec qualified as CommonTypesSpec
import Hasura.RQL.Types.EndpointSpec qualified as EndpointSpec
import Hasura.SQL.WKTSpec qualified as WKTSpec
import Hasura.Server.AuthSpec qualified as AuthSpec
import Hasura.Server.Init
import Hasura.Server.Migrate
import Hasura.Server.MigrateSpec qualified as MigrateSpec
import Hasura.Server.TelemetrySpec qualified as TelemetrySpec
import Hasura.Server.Types
import Hasura.Server.Version
import Hasura.Server.Version.TH
import Network.HTTP.Client qualified as HTTP
import Network.HTTP.Client.TLS qualified as HTTP
import Network.HTTP.Client.TransformableSpec qualified as TransformableSpec
import Options.Applicative
import System.Environment (getEnvironment)
import System.Exit (exitFailure)
import Test.Hspec
import Test.Hspec.Runner qualified as Hspec
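
-- The test runner is split into three suites: pure unit tests ('unitSpecs'),
-- Postgres integration tests ('buildPostgresSpecs'), and SQL Server integration
-- tests ('buildMSSQLSpecs'). Which suites run, and with which connection
-- strings, is decided by the command-line options parsed in 'parseArgs' below.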
data TestSuites
  = -- | Run all test suites. It probably doesn't make sense to be able to specify additional
    -- hspec args here.
    AllSuites !(Maybe URLTemplate) !(Maybe URLTemplate)
  | -- | Args to pass through to hspec (as if from 'getArgs'), and the specific suite to run.
    SingleSuite ![String] !TestSuite

data TestSuite
  = UnitSuite
  | PostgresSuite !(Maybe URLTemplate)
  | MSSQLSuite !(Maybe URLTemplate)

main :: IO ()
main =
  withVersion $$(getVersionFromEnvironment) $
    parseArgs >>= \case
      AllSuites pgConnOptions mssqlConnOptions -> do
        postgresSpecs <- buildPostgresSpecs pgConnOptions
        mssqlSpecs <- buildMSSQLSpecs mssqlConnOptions
        runHspec [] (unitSpecs *> postgresSpecs *> mssqlSpecs)
      SingleSuite hspecArgs suite ->
        runHspec hspecArgs =<< case suite of
          UnitSuite -> pure unitSpecs
          PostgresSuite pgConnOptions -> buildPostgresSpecs pgConnOptions
          MSSQLSuite mssqlConnOptions -> buildMSSQLSpecs mssqlConnOptions
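
-- Unlike the database suites below, 'unitSpecs' is a plain 'Spec': it is built
-- without resolving any connection string, which is why the "unit" subcommand
-- can run it on its own.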
unitSpecs :: Spec
unitSpecs = do
  describe "Data.NonNegativeInt" NonNegativeIntSpec.spec
  describe "Data.Parser.CacheControl" CacheControlParser.spec
  describe "Data.Parser.JSONPath" JsonPath.spec
  describe "Data.Time" TimeSpec.spec
  describe "Hasura.Eventing" EventingSpec.spec
  describe "Hasura.GraphQL.Parser.Directives" GraphQLDirectivesSpec.spec
  describe "Hasura.GraphQL.Schema.Remote" GraphRemoteSchemaSpec.spec
  describe "Hasura.Incremental" IncrementalSpec.spec
  describe "Hasura.RQL.Types.Common" CommonTypesSpec.spec
  describe "Hasura.RQL.Types.Endpoint" EndpointSpec.spec
  describe "Hasura.GraphQL.RemoteServer" RemoteServerSpec.spec
  describe "Hasura.SQL.WKT" WKTSpec.spec
  describe "Hasura.Server.Auth" AuthSpec.spec
  describe "Hasura.Server.Telemetry" TelemetrySpec.spec
  describe "Hasura.RQL.PermissionSpec" PermSpec.spec
  describe "Hasura.RQL.RequestTransformSpec" RequestTransformSpec.spec
  describe "Network.HTTP.Client.TransformableSpec" TransformableSpec.spec
buildMSSQLSpecs :: Maybe URLTemplate -> IO Spec
buildMSSQLSpecs maybeUrlTemplate = do
  env <- liftIO getEnvironment
  let envMap = Env.mkEnvironment env

  urlTemplate <- flip onLeft printErrExit $
    runWithEnv env $ do
      let envVar = fst mssqlConnectionString
      maybeV <- withEnv maybeUrlTemplate envVar
      onNothing maybeV $
        throwError $
          "Expected: " <> envVar

  connStr <- flip onLeft printErrExit $ renderURLTemplate envMap urlTemplate
  pure $ describe "Database.MSSQL.TransactionSpec" $ TransactionSpec.spec connStr

mssqlConnectionString :: (String, String)
mssqlConnectionString =
  ( "HASURA_MSSQL_CONN_STR",
    "SQL Server database connection string. Example: DRIVER={ODBC Driver 17 for SQL Server};SERVER=$IP_ADDRESS,$PORT;Uid=$USER;Pwd=$PASSWORD;"
  )
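
-- 'buildPostgresSpecs' resolves the Postgres connection (--database-url or the
-- environment variable named by 'databaseUrlEnv'), runs the catalog migrations
-- against that database, builds a rebuildable schema cache, and hands the
-- result to the 'Hasura.Server.Migrate' specs, so it needs a writable database.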
buildPostgresSpecs :: HasVersion => Maybe URLTemplate -> IO Spec
buildPostgresSpecs maybeUrlTemplate = do
  env <- getEnvironment
  let envMap = Env.mkEnvironment env

  pgUrlTemplate <- flip onLeft printErrExit $
    runWithEnv env $ do
      let envVar = fst databaseUrlEnv
      maybeV <- withEnv maybeUrlTemplate envVar
      onNothing maybeV $
        throwError $
          "Expected: --database-url or " <> envVar

  pgUrlText <- flip onLeft printErrExit $ renderURLTemplate envMap pgUrlTemplate
  let pgConnInfo = Q.ConnInfo 1 $ Q.CDDatabaseURI $ txtToBs pgUrlText
      urlConf = UrlValue $ InputWebhook pgUrlTemplate
      sourceConnInfo =
        PostgresSourceConnInfo urlConf (Just setPostgresPoolSettings) True Q.ReadCommitted Nothing
      sourceConfig = PostgresConnConfiguration sourceConnInfo Nothing

  pgPool <- Q.initPGPool pgConnInfo Q.defaultConnParams {Q.cpConns = 1} print
  let pgContext = mkPGExecCtx Q.Serializable pgPool

      setupCacheRef = do
        httpManager <- HTTP.newManager HTTP.tlsManagerSettings
        let sqlGenCtx = SQLGenCtx False False
            maintenanceMode = MaintenanceModeDisabled
            serverConfigCtx =
              ServerConfigCtx FunctionPermissionsInferred RemoteSchemaPermsDisabled sqlGenCtx maintenanceMode mempty
            cacheBuildParams = CacheBuildParams httpManager (mkPgSourceResolver print) serverConfigCtx
            pgLogger = print

            run :: MetadataStorageT (PGMetadataStorageAppT CacheBuild) a -> IO a
            run =
              runMetadataStorageT
                >>> flip runPGMetadataStorageAppT (pgPool, pgLogger)
                >>> runCacheBuild cacheBuildParams
                >>> runExceptT
                >=> flip onLeft printErrJExit
                >=> flip onLeft printErrJExit

        (metadata, schemaCache) <- run do
          metadata <-
            snd
              <$> (liftEitherM . runExceptT . runTx pgContext Q.ReadWrite)
                (migrateCatalog (Just sourceConfig) maintenanceMode =<< liftIO getCurrentTime)
          schemaCache <- lift $ lift $ buildRebuildableSchemaCache envMap metadata
          pure (metadata, schemaCache)

        cacheRef <- newMVar schemaCache
        pure $ NT (run . flip MigrateSpec.runCacheRefT cacheRef . fmap fst . runMetadataT metadata)

  pure $
    beforeAll setupCacheRef $
      describe "Hasura.Server.Migrate" $ MigrateSpec.spec sourceConfig pgContext pgConnInfo
parseArgs :: IO TestSuites
parseArgs =
  execParser $
    info (helper <*> (parseNoCommand <|> parseSubCommand)) $
      fullDesc <> header "Hasura GraphQL Engine test suite"
  where
    parseDbUrlTemplate =
      parseDatabaseUrl <|> (fmap rawConnDetailsToUrl <$> parseRawConnDetails)
    parseNoCommand = AllSuites <$> parseDbUrlTemplate <*> parseDbUrlTemplate
    parseSubCommand = SingleSuite <$> parseHspecPassThroughArgs <*> subCmd
      where
        subCmd =
          subparser $
            mconcat
              [ command "unit" $
                  info (pure UnitSuite) $
                    progDesc "Only run unit tests",
                command "postgres" $
                  info (helper <*> (PostgresSuite <$> parseDbUrlTemplate)) $
                    progDesc "Only run Postgres integration tests",
                command "mssql" $
                  info (helper <*> (MSSQLSuite <$> parseDbUrlTemplate)) $
                    progDesc "Only run SQL Server integration tests"
              ]
        -- Add additional arguments and tweak as needed:
        hspecArgs = ["match", "skip"]
        -- parse to a list of arguments as they'd appear from 'getArgs':
        parseHspecPassThroughArgs :: Parser [String]
        parseHspecPassThroughArgs = fmap concat $
          for hspecArgs $ \nm ->
            fmap (maybe [] (\a -> ["--" <> nm, a])) $
              optional $
                strOption
                  ( long nm
                      <> metavar "<PATTERN>"
                      <> help "Flag passed through to hspec (see hspec docs)."
                  )
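
-- 'runHspec' feeds the pass-through arguments to hspec's own config parser,
-- runs the given spec tree, and exits with failure (via 'evaluateSummary') if
-- any example fails.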
runHspec :: [String] -> Spec -> IO ()
runHspec hspecArgs m = do
  config <- Hspec.readConfig Hspec.defaultConfig hspecArgs
  Hspec.evaluateSummary =<< Hspec.runSpec m config

printErrExit :: String -> IO a
printErrExit = (*> exitFailure) . putStrLn

printErrJExit :: (A.ToJSON a) => a -> IO b
printErrJExit = (*> exitFailure) . BL.putStrLn . A.encode