Inconsistent metadata support for REST endpoints

Previously, invalid REST endpoints would throw errors during the schema cache build.

This PR changes the validation to record these issues as inconsistent metadata objects instead, so that `allow_inconsistent_metadata` can be used with inconsistent REST endpoints.

All non-fatal endpoint definition errors are now reported as inconsistent metadata warnings or errors, depending on whether `allow_inconsistent_metadata` is used. Endpoints with issues are still created, and they return informational runtime errors when called.
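
To make the shape of that change concrete, here is a minimal, hypothetical sketch of the before/after control flow. The `Endpoint`, `Reason`, `validate`, `buildOld`, and `buildNew` names below are illustrative stand-ins, not the engine's actual API:

```haskell
import Data.Either (partitionEithers)
import Data.List   (isInfixOf)

type Endpoint = String  -- stand-in for EndpointMetadata
type Reason   = String  -- stand-in for an InconsistentMetadata value

-- One toy validation rule standing in for the real checks
-- (duplicate variables, empty segments, ambiguous paths).
validate :: Endpoint -> Either Reason Endpoint
validate url
  | "//" `isInfixOf` url = Left ("Empty segments or unnamed variables are not allowed: " <> url)
  | otherwise            = Right url

-- Old behaviour (sketch): the first invalid endpoint aborts the whole build.
buildOld :: [Endpoint] -> Either Reason [Endpoint]
buildOld = traverse validate

-- New behaviour (sketch): invalid endpoints are collected as inconsistencies
-- and the build carries on; allow_inconsistent_metadata then decides whether
-- those inconsistencies are reported as warnings or rejected outright.
buildNew :: [Endpoint] -> ([Reason], [Endpoint])
buildNew = partitionEithers . map validate

main :: IO ()
main = print (buildNew ["simple", "foo//bar"])
-- ==> (["Empty segments or unnamed variables are not allowed: foo//bar"], ["simple"])
```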

The console impact when creating endpoints is that error messages now refer to metadata inconsistencies rather than the REST feature at the top level:

![image](https://user-images.githubusercontent.com/92299/109911843-ede9ec00-7cfe-11eb-9c55-7cf924d662a6.png)

<img width="969" alt="image" src="https://user-images.githubusercontent.com/92299/110258597-8336fa00-7ff7-11eb-872c-bfca945aa0e8.png">

Note: conflicting endpoints generate one error per conflicting set of endpoints, due to the implementation of `groupInconsistentMetadataById` and `imObjectIds`. This keeps error messages terse, but it may cause problems if other code makes assumptions about the contents of `imObjectIds`.
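
For reference, a toy model (not the engine's real types) of why the `take 1` in `imObjectIds` matters: if the `AmbiguousRestEndpoints` value exposed every involved object id, `groupInconsistentMetadataById` would file the same conflict under each endpoint and report it once per endpoint.

```haskell
import qualified Data.Map.Strict as M

-- Hypothetical stand-in for AmbiguousRestEndpoints carrying the ids of every
-- endpoint involved in one conflict.
newtype Inconsistency = Ambiguous [String]
  deriving (Show)

-- Mirrors the `take 1` workaround: only the first endpoint id is exposed,
-- so the grouping below files the conflict under a single key.
objectIds :: Inconsistency -> [String]
objectIds (Ambiguous ids) = take 1 ids

-- Toy version of groupInconsistentMetadataById: index each inconsistency
-- under every object id it reports.
groupById :: [Inconsistency] -> M.Map String [Inconsistency]
groupById ims = M.fromListWith (<>) [ (oid, [im]) | im <- ims, oid <- objectIds im ]

main :: IO ()
main = print (groupById [Ambiguous ["simple", "with_arg", "with_args"]])
-- With take 1:    one entry, keyed on "simple", so one reported error.
-- Without take 1: the same conflict would be filed under all three ids and
--                 reported once per endpoint.
```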

Related to https://github.com/hasura/graphql-engine-mono/pull/473 (Allow Inconsistent Metadata (v2) #473 (Merged))

---

### Kodiak commit message

Changes the validation to use inconsistent metadata objects for REST endpoint issues.

#### Commit title

Inconsistent metadata for REST endpoints

GitOrigin-RevId: b9de971208e9bb0a319c57df8dace44cb115ff66
Lyndon Maydwell 2021-03-10 16:25:12 +11:00 committed by hasura-bot
parent eccefff925
commit 0c1016e065
12 changed files with 179 additions and 40 deletions

View File

@ -28,6 +28,7 @@
- console: add relationship tab for mssql tables (#677)
- console: add permissions support for mssql tables (#677)
- build: fix the packaging of static console assets (fix #6610)
- server: make REST endpoint errors compatible with inconsistent metadata
## v2.0.0-alpha.2

View File

@ -28,6 +28,7 @@ import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as HS
import qualified Data.HashSet.InsOrd as HSIns
import qualified Data.Set as S
import qualified Language.GraphQL.Draft.Syntax as G
import Control.Arrow.Extended
@ -168,6 +169,12 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
duplicateVariables :: EndpointMetadata a -> Bool
duplicateVariables m = any ((>1) . length) $ group $ sort $ catMaybes $ splitPath Just (const Nothing) (_ceUrl m)
endpointObjId :: EndpointMetadata q -> MetadataObjId
endpointObjId md = MOEndpoint (_ceName md)
endpointObject :: EndpointMetadata q -> MetadataObject
endpointObject md = MetadataObject (endpointObjId md) (toJSON $ OMap.lookup (_ceName md) $ _metaRestEndpoints metadata)
-- Cases of urls that generate invalid segments:
-- * "" -> Error: "Empty URL"
-- * "/asdf" -> Error: "Leading slash not allowed"
@ -175,18 +182,22 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
-- * "asdf//qwer" -> Error: "Double Slash not allowed"
-- * "asdf/:/qwer" -> Error: "Variables must be named"
-- * "asdf/:x/qwer/:x" -> Error: "Duplicate Variable: x"
invalidSegments m = any (`elem` ["",":"]) (splitPath id id (_ceUrl m))
hasInvalidSegments :: EndpointMetadata query -> Bool
hasInvalidSegments m = any (`elem` ["",":"]) (splitPath id id (_ceUrl m))
bindA -< onJust (nonEmpty $ filter duplicateVariables (M.elems $ _boEndpoints resolvedOutputs)) $ \md ->
throw400 BadRequest $ "Duplicate variables found in endpoint paths: " <> commaSeparated (_ceUrl <$> md)
ceUrlTxt = toTxt . _ceUrl
bindA -< onJust (nonEmpty $ filter invalidSegments (M.elems $ _boEndpoints resolvedOutputs)) $ \md ->
throw400 BadRequest $ "Empty segments or unnamed variables are not allowed: " <> commaSeparated (_ceUrl <$> md)
endpoints = buildEndpointsTrie (M.elems $ _boEndpoints resolvedOutputs)
let endpoints = buildEndpointsTrie (M.elems $ _boEndpoints resolvedOutputs)
duplicateF md = DuplicateRestVariables ("Duplicate variables found in endpoint path " <> (ceUrlTxt md)) (endpointObject md)
duplicateRestVariables = map duplicateF $ filter duplicateVariables (M.elems $ _boEndpoints resolvedOutputs)
bindA -< onJust (nonEmpty $ ambiguousPaths endpoints) $ \ambPaths ->
throw409 $ "Ambiguous URL paths in endpoints: " <> commaSeparated (renderPath <$> ambPaths)
invalidF md = InvalidRestSegments ("Empty segments or unnamed variables are not allowed: " <> (ceUrlTxt md)) (endpointObject md)
invalidRestSegments = map invalidF $ filter hasInvalidSegments (M.elems $ _boEndpoints resolvedOutputs)
ambiguousF' ep = MetadataObject (endpointObjId ep) (toJSON ep)
ambiguousF mds = AmbiguousRestEndpoints ("Ambiguous URL paths: " <> commaSeparated (map _ceUrl mds)) (map ambiguousF' mds)
ambiguousRestEndpoints = map (ambiguousF . S.elems . snd) $ ambiguousPathsGrouped endpoints
returnA -< SchemaCache
{ scSources = _boSources resolvedOutputs
@ -210,6 +221,9 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
<> dependencyInconsistentObjects
<> toList gqlSchemaInconsistentObjects
<> toList relaySchemaInconsistentObjects
<> duplicateRestVariables
<> invalidRestSegments
<> ambiguousRestEndpoints
, scApiLimits = _boApiLimits resolvedOutputs
, scMetricsConfig = _boMetricsConfig resolvedOutputs
}

View File

@ -12,6 +12,7 @@ module Hasura.RQL.Types.Endpoint.Trie
, insertPath
, matchPath
, ambiguousPaths
, ambiguousPathsGrouped
)
where
@ -168,15 +169,35 @@ matchPath k path = foldMap toResult . lookupPath path
Just (_:_) -> MatchAmbiguous
_ -> maybe MatchNotFound MatchMissingKey (nonEmpty $ M.keys methodMap)
-- | A version of ambiguousPaths that attempts to group all ambiguous paths that have overlapping endpoints
ambiguousPathsGrouped :: (Hashable a, Eq k, Hashable k, Ord v, Ord a) => MultiMapTrie a k v -> [(S.Set (Path a), S.Set v)]
ambiguousPathsGrouped = groupAmbiguousPaths . map (first S.singleton) . ambiguousPaths
groupAmbiguousPaths :: (Ord a, Ord v) => [(S.Set (Path a), S.Set v)] -> [(S.Set (Path a), S.Set v)]
groupAmbiguousPaths [] = []
groupAmbiguousPaths (x:xs) =
if any fst added
then groupAmbiguousPaths $ map snd added
else x : groupAmbiguousPaths xs
where
added = map (add x) xs
add (p1, v1) (p2, v2)
| intersects v1 v2 = (True, (S.union p1 p2, S.union v1 v2))
| otherwise = (False, (p2, v2))
intersects :: Ord a => S.Set a -> S.Set a -> Bool
intersects a b = not $ S.null $ S.intersection a b
-- | Detect and return all ambiguous paths in the @MultiMapTrie@
-- A path @p@ is ambiguous if @matchPath k p@ can return @MatchAmbiguous@ for some @k@.
ambiguousPaths :: (Eq a, Hashable a, Eq k, Hashable k, Ord v) => MultiMapTrie a k v -> [Path a]
ambiguousPaths :: (Eq a, Hashable a, Eq k, Hashable k, Ord v) => MultiMapTrie a k v -> [(Path a, S.Set v)]
ambiguousPaths (Trie pathMap (MultiMap methodMap)) =
thisNodeAmbiguousPaths ++ childNodesAmbiguousPaths
where
isAmbiguous e = S.size e >= 2
thisNodeAmbiguousPaths = guard (any isAmbiguous methodMap) >> [[]]
childNodesAmbiguousPaths = uncurry childNodeAmbiguousPaths =<< M.toList pathMap
childNodeAmbiguousPaths pc t = (pc:) <$> ambiguousPaths (mergeWildcardTrie t)
wildcardTrie = M.lookup PathParam pathMap
mergeWildcardTrie = maybe id (<>) wildcardTrie
isAmbiguous e = S.size e >= 2
ambiguous = mconcat $ filter isAmbiguous $ M.elems methodMap
thisNodeAmbiguousPaths = guard (not $ null $ ambiguous) >> [([], ambiguous)]
childNodesAmbiguousPaths = uncurry childNodeAmbiguousPaths =<< M.toList pathMap
childNodeAmbiguousPaths pc t = first (pc:) <$> ambiguousPaths (mergeWildcardTrie t)
wildcardTrie = M.lookup PathParam pathMap
mergeWildcardTrie = maybe id (<>) wildcardTrie
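
For illustration only, a self-contained restatement of the grouping step above, using `String` paths and `Char` endpoint names as stand-ins for the real `Path a` and endpoint values: two ambiguous paths that share an endpoint collapse into a single group.

```haskell
import qualified Data.Set as S

-- Same merging logic as groupAmbiguousPaths, restated over simple types.
groupAmbiguousPaths :: (Ord p, Ord v) => [(S.Set p, S.Set v)] -> [(S.Set p, S.Set v)]
groupAmbiguousPaths [] = []
groupAmbiguousPaths (x:xs)
  | any fst added = groupAmbiguousPaths (map snd added)
  | otherwise     = x : groupAmbiguousPaths xs
  where
    added = map (add x) xs
    add (p1, v1) (p2, v2)
      | not (S.null (S.intersection v1 v2)) = (True, (S.union p1 p2, S.union v1 v2))
      | otherwise                           = (False, (p2, v2))

main :: IO ()
main = print $ groupAmbiguousPaths
  [ (S.singleton "simple",       S.fromList "ab")  -- shares endpoint 'b' with the next path
  , (S.singleton ":conflicting", S.fromList "bc")
  , (S.singleton "other",        S.fromList "d")   -- disjoint, stays in its own group
  ]
-- ==> [ (fromList [":conflicting","simple"], fromList "abc")
--     , (fromList ["other"],                 fromList "d") ]
```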

View File

@ -140,6 +140,9 @@ data InconsistentMetadata
= InconsistentObject !Text !MetadataObject
| ConflictingObjects !Text ![MetadataObject]
| DuplicateObjects !MetadataObjId ![Value]
| DuplicateRestVariables !Text !MetadataObject
| InvalidRestSegments !Text !MetadataObject
| AmbiguousRestEndpoints !Text ![MetadataObject]
deriving (Eq)
$(makePrisms ''InconsistentMetadata)
@ -152,12 +155,18 @@ imObjectIds = \case
InconsistentObject _ metadata -> [_moId metadata]
ConflictingObjects _ metadatas -> map _moId metadatas
DuplicateObjects objectId _ -> [objectId]
DuplicateRestVariables _ md -> [_moId md]
InvalidRestSegments _ md -> [_moId md]
AmbiguousRestEndpoints _ mds -> take 1 $ map _moId mds -- TODO: Take 1 is a workaround to ensure that conflicts are not reported multiple times per endpoint.
imReason :: InconsistentMetadata -> Text
imReason = \case
InconsistentObject reason _ -> reason
ConflictingObjects reason _ -> reason
DuplicateObjects objectId _ -> "multiple definitions for " <> moiName objectId
InconsistentObject reason _ -> reason
ConflictingObjects reason _ -> reason
DuplicateObjects objectId _ -> "multiple definitions for " <> moiName objectId
DuplicateRestVariables reason _ -> reason
InvalidRestSegments reason _ -> reason
AmbiguousRestEndpoints reason _ -> reason
-- | Builds a map from each unique metadata object id to the inconsistencies associated with it.
-- Note that a single inconsistency can involve multiple metadata objects, so the same inconsistency
@ -178,6 +187,10 @@ instance ToJSON InconsistentMetadata where
[ "type" .= String (moiTypeName objectId)
, "definitions" .= definitions ]
DuplicateRestVariables _ md -> metadataObjectFields md
InvalidRestSegments _ md -> metadataObjectFields md
AmbiguousRestEndpoints _ mds -> [ "conflicts" .= map _moDefinition mds ]
metadataObjectFields (MetadataObject objectId definition) =
[ "type" .= String (moiTypeName objectId)
, "definition" .= definition ]

View File

@ -67,7 +67,7 @@ spec = describe "Endpoint" $ do
r -> expectationFailure $ "Expected MatchMissingKey. Got " <> show r
describe "ambiguousPaths" $ do
let amb = ambiguousPaths
let amb = map fst . ambiguousPaths
it "empty trie" $
amb e `shouldBe` []

View File

@ -1,11 +1,52 @@
description: Add a conflicting endpoint
url: /v1/query
status: 409
status: 400
response:
internal:
- reason: 'Ambiguous URL paths: :conflicting, simple, simple_cached, with_arg, with_args'
conflicts:
- definition:
query: query { test_table { first_name last_name } }
url: :conflicting
methods:
- GET
name: conflicting
comment:
- definition:
query: query { test_table { first_name last_name } }
url: simple
methods:
- GET
name: simple
comment:
- definition:
query: 'query @cached(ttl: 5) { test_table { first_name last_name } }'
url: simple_cached
methods:
- GET
name: simple_cached
comment:
- definition:
query: 'query ($first_name:String!) { test_table(where: {first_name: { _eq:
$first_name } }) { first_name last_name } }'
url: with_arg
methods:
- GET
- POST
name: with_arg
comment:
- definition:
query: 'query ($first_name: String!, $last_name:String!) { test_table(where:
{first_name: { _eq: $first_name } last_name: { _eq: $last_name }}) { first_name
last_name } }'
url: with_args
methods:
- GET
name: with_args
comment:
path: $.args
error: |-
Ambiguous URL paths in endpoints: with_args, with_arg, simple, simple_cached
code: conflict
error: cannot continue due to new inconsistent metadata
code: unexpected
query:
type: create_rest_endpoint
args:
@ -17,3 +58,4 @@ query:
query:
collection_name: test_collection
query_name: simple_query

View File

@ -2,10 +2,22 @@ description: Try to add an endpoint with duplicate params
url: /v1/query
status: 400
response:
internal:
- definition:
definition:
query:
collection_name: test_collection
query_name: simple_query
url: foo/:id/bar/:id
methods:
- GET
name: duplicate
comment:
reason: Duplicate variables found in endpoint path foo/:id/bar/:id
type: endpoint
path: $.args
error: |-
Duplicate variables found in endpoint paths: foo/:id/bar/:id
code: bad-request
error: cannot continue due to new inconsistent metadata
code: unexpected
query:
type: create_rest_endpoint
args:

View File

@ -10,7 +10,7 @@ query:
type: create_rest_endpoint
args:
url: ""
name: duplicate
name: empty
methods:
- GET
definition:

View File

@ -2,10 +2,22 @@ description: Tries to create an endpoint with empty path param
url: /v1/query
status: 400
response:
internal:
- definition:
definition:
query:
collection_name: test_collection
query_name: simple_query
url: foo/:/bar
methods:
- GET
name: foo
comment:
reason: 'Empty segments or unnamed variables are not allowed: foo/:/bar'
type: endpoint
path: $.args
error: |-
Empty segments or unnamed variables are not allowed: foo/:/bar
code: bad-request
error: cannot continue due to new inconsistent metadata
code: unexpected
query:
type: create_rest_endpoint
args:

View File

@ -2,10 +2,22 @@ description: Tries to create an endpoint with empty path component
url: /v1/query
status: 400
response:
internal:
- definition:
definition:
query:
collection_name: test_collection
query_name: simple_query
url: foo//bar
methods:
- GET
name: foo
comment:
reason: 'Empty segments or unnamed variables are not allowed: foo//bar'
type: endpoint
path: $.args
error: |-
Empty segments or unnamed variables are not allowed: foo//bar
code: bad-request
error: cannot continue due to new inconsistent metadata
code: unexpected
query:
type: create_rest_endpoint
args:

View File

@ -2,15 +2,27 @@ description: Tries to create an endpoint with trailing slash
url: /v1/query
status: 400
response:
internal:
- definition:
definition:
query:
collection_name: test_collection
query_name: simple_query
url: foo/
methods:
- GET
name: trailing
comment:
reason: 'Empty segments or unnamed variables are not allowed: foo/'
type: endpoint
path: $.args
error: |-
Empty segments or unnamed variables are not allowed: foo/
code: bad-request
error: cannot continue due to new inconsistent metadata
code: unexpected
query:
type: create_rest_endpoint
args:
url: "foo/"
name: duplicate
name: trailing
methods:
- GET
definition:

View File

@ -63,10 +63,10 @@ class TestCustomEndpoints:
def test_endpoint_empty_path(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/endpoint_empty_path.yaml', transport)
def test_endpoint_trailing_slash(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/endpoint_trailing_slash.yaml', transport)
def test_endpoint_empty_path_segment(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/endpoint_empty_path_segment.yaml', transport)
@ -74,4 +74,4 @@ class TestCustomEndpoints:
check_query_f(hge_ctx, self.dir() + '/endpoint_empty_path_param.yaml', transport)
def test_endpoint_subscription(self, hge_ctx, transport):
check_query_f(hge_ctx, self.dir() + '/endpoint_subscription.yaml', transport)
check_query_f(hge_ctx, self.dir() + '/endpoint_subscription.yaml', transport)