server: Preserve ordering when possible, and sort when it's not.

When upgrading to GHC 9.4, we noticed a number of test failures because the iteration order of `HashMap`s (which depends on the hash seed, and is therefore unspecified) had changed. With this changeset, I am endeavoring to mitigate this now and in the future.

This makes one of two changes in each of the areas where we depend on the iteration order of elements in a `HashMap` (both approaches are sketched below):

  1. the original ordering is preserved by switching to `InsOrdHashMap`, or
  2. we sort the data after retrieving it.

Fortunately, we do not do this anywhere that we _must_ preserve order; it's "just" descriptions, error messages, and OpenAPI metadata. The main problem is that tests are likely to fail each time we upgrade GHC (or whatever else ends up providing the hash seed).
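
To make the two strategies concrete, here is a minimal sketch (illustration only, not code from this changeset; the `fields` list and all names are invented), assuming the `unordered-containers` and `insert-ordered-containers` packages:

```haskell
{-# LANGUAGE ImportQualifiedPost #-}

import Data.HashMap.Strict qualified as HashMap
import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap
import Data.List (sortOn)

-- Hypothetical input whose ordering we care about.
fields :: [(String, Int)]
fields = [("id", 0), ("title", 1), ("author_id", 2)]

-- Strategy 1: preserve insertion order. An InsOrdHashMap remembers the
-- order in which keys were inserted, independent of the hash seed.
preserved :: [(String, Int)]
preserved = InsOrdHashMap.toList (InsOrdHashMap.fromList fields)
-- [("id",0),("title",1),("author_id",2)] on every GHC version

-- Strategy 2: sort on the way out. HashMap.toList's order is
-- unspecified, so we impose a deterministic order explicitly.
sorted :: [(String, Int)]
sorted = sortOn fst (HashMap.toList (HashMap.fromList fields))
-- [("author_id",2),("id",0),("title",1)] on every GHC version

main :: IO ()
main = print preserved >> print sorted
```

Either way the result is deterministic, so upgrading GHC (and with it the hash seed) can no longer reorder it.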

[NDAT-705]: https://hasurahq.atlassian.net/browse/NDAT-705?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/9390
GitOrigin-RevId: 84503e029b44094edbbc298651744bc2843c15f3

View File

@@ -24,6 +24,7 @@ import Data.Function ((&))
 import Data.Functor ((<&>))
 import Data.HashMap.Strict (HashMap)
 import Data.HashMap.Strict qualified as HashMap
+import Data.HashMap.Strict.InsOrd (InsOrdHashMap)
 import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap
 import Data.HashSet qualified as S
 import Data.Hashable (Hashable)
@@ -175,7 +176,7 @@ selectionSet ::
 Name ->
 Maybe Description ->
 [FieldParser origin m a] ->
-Parser origin 'Output m (InsOrdHashMap.InsOrdHashMap Name (ParsedSelection a))
+Parser origin 'Output m (InsOrdHashMap Name (ParsedSelection a))
 selectionSet name desc fields = selectionSetObject name desc fields []
 safeSelectionSet ::
@@ -184,7 +185,7 @@ safeSelectionSet ::
 Name ->
 Maybe Description ->
 [FieldParser origin m a] ->
-n (Parser origin 'Output m (InsOrdHashMap.InsOrdHashMap Name (ParsedSelection a)))
+n (Parser origin 'Output m (InsOrdHashMap Name (ParsedSelection a)))
 {-# INLINE safeSelectionSet #-}
 safeSelectionSet name description fields =
 case duplicatesList of
@@ -194,10 +195,14 @@ safeSelectionSet name description fields =
 -- plural
 printedDuplicates -> throwError $ "Encountered conflicting definitions in the selection set for " <> toErrorValue name <> " for fields: " <> toErrorValue printedDuplicates <> ". Fields must not be defined more than once across all sources."
 where
-namesOrigins :: HashMap Name [Maybe origin]
-namesOrigins = HashMap.fromListWith (<>) $ (dName &&& (pure . dOrigin)) . fDefinition <$> fields
-duplicates :: HashMap Name [Maybe origin]
-duplicates = HashMap.filter ((> 1) . length) namesOrigins
+namesOrigins :: InsOrdHashMap Name [Maybe origin]
+namesOrigins =
+foldr
+(uncurry (InsOrdHashMap.insertWith (<>)))
+InsOrdHashMap.empty
+((dName &&& (pure . dOrigin)) . fDefinition <$> fields)
+duplicates :: InsOrdHashMap Name [Maybe origin]
+duplicates = InsOrdHashMap.filter ((> 1) . length) namesOrigins
 uniques = S.toList . S.fromList
 printEntry (fieldName, originsM) =
 let origins = uniques $ catMaybes originsM
@@ -207,7 +212,7 @@ safeSelectionSet name description fields =
 toErrorValue fieldName <> " defined in " <> toErrorValue origins <> " and of unknown origin"
 | otherwise ->
 toErrorValue fieldName <> " defined in " <> toErrorValue origins
-duplicatesList = printEntry <$> HashMap.toList duplicates
+duplicatesList = printEntry <$> InsOrdHashMap.toList duplicates
 -- Should this rather take a non-empty `FieldParser` list?
 -- See also Note [Selectability of tables].
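
An aside on the `foldr`/`insertWith` replacement for `HashMap.fromListWith` above (presumably needed because `insert-ordered-containers` offers no ordered `fromListWith`): a standalone sketch with simplified, invented types, showing the same shape of fold:

```haskell
{-# LANGUAGE ImportQualifiedPost #-}

import Data.HashMap.Strict.InsOrd (InsOrdHashMap)
import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap

-- Group values by key into an insertion-ordered map, mirroring the
-- namesOrigins fold in the hunk above.
groupByKey :: [(String, Int)] -> InsOrdHashMap String [Int]
groupByKey kvs =
  foldr
    (uncurry (InsOrdHashMap.insertWith (<>)))
    InsOrdHashMap.empty
    [(k, [v]) | (k, v) <- kvs]

main :: IO ()
main = print (InsOrdHashMap.toList (groupByKey [("a", 1), ("b", 2), ("a", 3)]))
-- prints [("a",[1,3]),("b",[2])]: the foldr direction keeps each key's
-- values in source order, and the key order is deterministic rather
-- than hash-seed-dependent.
```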

View File

@@ -450,7 +450,7 @@ columnParser columnType nullability = case columnType of
 `onLeft` (P.parseErrorWith P.ParseFailed . toErrorMessage . qeError)
 }
 ColumnEnumReference (EnumReference tableName enumValues tableCustomName) ->
-case nonEmpty (HashMap.toList enumValues) of
+case nonEmpty . sortOn fst $ HashMap.toList enumValues of
 Just enumValuesList ->
 peelWithOrigin
 . fmap (ColumnValue columnType)

View File

@@ -11,6 +11,7 @@ module Hasura.Backends.Postgres.Translate.Select.Internal.GenerateSelect
 where
 import Data.HashMap.Strict qualified as HashMap
+import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap
 import Data.List.NonEmpty qualified as NE
 import Hasura.Backends.Postgres.SQL.DML qualified as S
 import Hasura.Backends.Postgres.SQL.Types
@@ -42,7 +43,7 @@ generateSQLSelect ::
 generateSQLSelect joinCondition selectSource selectNode =
 S.mkSelect
 { S.selExtr =
-case [S.Extractor e $ Just a | (a, e) <- HashMap.toList extractors] of
+case [S.Extractor e $ Just a | (a, e) <- InsOrdHashMap.toList extractors] of
 -- If the select list is empty we will generated code which looks like this:
 -- > SELECT FROM ...
 -- This works for postgres, but not for cockroach, which expects a non-empty

View File

@@ -43,7 +43,7 @@ withWriteObjectRelation ::
 (MonadWriter SelectWriter m) =>
 m
 ( ObjectRelationSource,
-HashMap.HashMap S.ColumnAlias S.SQLExp,
+InsOrdHashMap S.ColumnAlias S.SQLExp,
 a
 ) ->
 m a
@@ -61,7 +61,7 @@ withWriteArrayRelation ::
 m
 ( ArrayRelationSource,
 S.Extractor,
-HashMap.HashMap S.ColumnAlias S.SQLExp,
+InsOrdHashMap S.ColumnAlias S.SQLExp,
 a
 ) ->
 m a
@@ -81,7 +81,7 @@ withWriteArrayConnection ::
 m
 ( ArrayConnectionSource,
 S.Extractor,
-HashMap.HashMap S.ColumnAlias S.SQLExp,
+InsOrdHashMap S.ColumnAlias S.SQLExp,
 a
 ) ->
 m a
@@ -101,7 +101,7 @@ withWriteComputedFieldTableSet ::
 m
 ( ComputedFieldTableSetSource,
 S.Extractor,
-HashMap.HashMap S.ColumnAlias S.SQLExp,
+InsOrdHashMap S.ColumnAlias S.SQLExp,
 a
 ) ->
 m a

View File

@@ -5,7 +5,7 @@ module Hasura.Backends.Postgres.Translate.Select.Internal.OrderBy
 where
 import Control.Lens ((^?))
-import Data.HashMap.Strict qualified as HashMap
+import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap
 import Data.List.NonEmpty qualified as NE
 import Hasura.Backends.Postgres.SQL.DML qualified as S
 import Hasura.Backends.Postgres.SQL.Types
@@ -133,7 +133,7 @@ processOrderByItems sourcePrefix' fieldAlias' similarArrayFields distOnCols = \c
 relSource = ObjectRelationSource relName colMapping selectSource Nullable
 pure
 ( relSource,
-HashMap.singleton relOrderByAlias relOrdByExp,
+InsOrdHashMap.singleton relOrderByAlias relOrdByExp,
 S.mkQIdenExp relSourcePrefix relOrderByAlias
 )
 AOCArrayAggregation relInfo relFilter aggOrderBy -> withWriteArrayRelation $ do
@@ -160,7 +160,7 @@ processOrderByItems sourcePrefix' fieldAlias' similarArrayFields distOnCols = \c
 pure
 ( relSource,
 topExtractor,
-HashMap.fromList $ aggregateFieldsToExtractorExps relSourcePrefix fields,
+InsOrdHashMap.fromList $ aggregateFieldsToExtractorExps relSourcePrefix fields,
 S.mkQIdenExp relSourcePrefix (mkAggregateOrderByAlias aggOrderBy)
 )
 AOCComputedField ComputedFieldOrderBy {..} ->
@@ -186,7 +186,7 @@ processOrderByItems sourcePrefix' fieldAlias' similarArrayFields distOnCols = \c
 pure
 ( source,
 topExtractor,
-HashMap.fromList $ aggregateFieldsToExtractorExps computedFieldSourcePrefix fields,
+InsOrdHashMap.fromList $ aggregateFieldsToExtractorExps computedFieldSourcePrefix fields,
 S.mkQIdenExp computedFieldSourcePrefix (mkAggregateOrderByAlias aggOrderBy)
 )

View File

@@ -24,6 +24,7 @@ module Hasura.Backends.Postgres.Translate.Select.Internal.Process
 where
 import Data.HashMap.Strict qualified as HashMap
+import Data.HashMap.Strict.InsOrd qualified as InsOrdHashMap
 import Data.List.NonEmpty qualified as NE
 import Data.Text.Extended (ToTxt (toTxt))
 import Data.Text.NonEmpty qualified as TNE
@@ -205,7 +206,7 @@ processAnnAggregateSelect ::
 AnnAggregateSelect ('Postgres pgKind) ->
 m
 ( SelectSource,
-HashMap.HashMap S.ColumnAlias S.SQLExp,
+InsOrdHashMap S.ColumnAlias S.SQLExp,
 S.Extractor
 )
 processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do
@@ -247,7 +248,7 @@ processAnnAggregateSelect sourcePrefixes fieldAlias annAggSel = do
 $ flip concatMap (map (second snd) processedFields)
 $ \(FieldName fieldText, fieldExp) -> [S.SELit fieldText, fieldExp]
 nodeExtractors =
-HashMap.fromList
+InsOrdHashMap.fromList
 $ concatMap (fst . snd) processedFields
 <> orderByAndDistinctExtrs
@@ -347,7 +348,7 @@ processAnnFields sourcePrefix fieldAlias similarArrFields annFields tCase = do
 annFieldsExtr <- processAnnFields (identifierToTableIdentifier $ _pfThis sourcePrefixes) fieldName HashMap.empty objAnnFields tCase
 pure
 ( objRelSource,
-HashMap.fromList [annFieldsExtr],
+uncurry InsOrdHashMap.singleton annFieldsExtr,
 S.mkQIdenExp objRelSourcePrefix fieldName
 )
 AFArrayRelation arrSel -> do
@@ -602,7 +603,7 @@ processAnnSimpleSelect ::
 AnnSimpleSelect ('Postgres pgKind) ->
 m
 ( SelectSource,
-HashMap.HashMap S.ColumnAlias S.SQLExp
+InsOrdHashMap S.ColumnAlias S.SQLExp
 )
 processAnnSimpleSelect sourcePrefixes fieldAlias permLimitSubQuery annSimpleSel = do
 (selectSource, orderByAndDistinctExtrs, _) <-
@@ -621,7 +622,7 @@ processAnnSimpleSelect sourcePrefixes fieldAlias permLimitSubQuery annSimpleSel
 similarArrayFields
 annSelFields
 tCase
-let allExtractors = HashMap.fromList $ annFieldsExtr : orderByAndDistinctExtrs
+let allExtractors = InsOrdHashMap.fromList $ annFieldsExtr : orderByAndDistinctExtrs
 pure (selectSource, allExtractors)
 where
 AnnSelectG annSelFields tableFrom tablePermissions tableArgs _ tCase = annSimpleSel
@@ -644,7 +645,7 @@ processConnectionSelect ::
 m
 ( ArrayConnectionSource,
 S.Extractor,
-HashMap.HashMap S.ColumnAlias S.SQLExp
+InsOrdHashMap S.ColumnAlias S.SQLExp
 )
 processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connectionSelect = do
 (selectSource, orderByAndDistinctExtrs, maybeOrderByCursor) <-
@@ -666,7 +667,7 @@ processConnectionSelect sourcePrefixes fieldAlias relAlias colMapping connection
 mkCursorExtractor primaryKeyColumnsObjectExp : primaryKeyColumnExtractors
 (topExtractorExp, exps) <- flip runStateT [] $ processFields selectSource
 let topExtractor = S.Extractor topExtractorExp $ Just $ S.toColumnAlias fieldIdentifier
-allExtractors = HashMap.fromList $ cursorExtractors <> exps <> orderByAndDistinctExtrs
+allExtractors = InsOrdHashMap.fromList $ cursorExtractors <> exps <> orderByAndDistinctExtrs
 arrayConnectionSource =
 ArrayConnectionSource
 relAlias

View File

@@ -136,7 +136,7 @@ applySortingAndSlicing SortingAndSlicing {..} =
 ApplySortingAndSlicing (Nothing, noSlicing, Nothing) (Just nodeOrderBy, _sasSlicing, nodeDistinctOn)
 data SelectNode = SelectNode
-{ _snExtractors :: HashMap.HashMap Postgres.ColumnAlias Postgres.SQLExp,
+{ _snExtractors :: InsOrdHashMap Postgres.ColumnAlias Postgres.SQLExp,
 _snJoinTree :: JoinTree
 }
 deriving stock (Eq, Show)

View File

@@ -258,7 +258,7 @@ tableSelectFields tableInfo = do
 tableColumns ::
 forall b. TableInfo b -> [ColumnInfo b]
 tableColumns tableInfo =
-mapMaybe columnInfo . HashMap.elems . _tciFieldInfoMap . _tiCoreInfo $ tableInfo
+sortOn ciPosition . mapMaybe columnInfo . HashMap.elems . _tciFieldInfoMap . _tiCoreInfo $ tableInfo
 where
 columnInfo (FIColumn (SCIScalarColumn ci)) = Just ci
 columnInfo _ = Nothing

View File

@@ -159,7 +159,6 @@ parseScalarValueColumnType ::
 parseScalarValueColumnType columnType value = case columnType of
 ColumnScalar scalarType -> liftEither $ parseScalarValue @b scalarType value
 ColumnEnumReference (EnumReference tableName enumValues _) ->
--- maybe (pure $ PGNull PGText) parseEnumValue =<< decodeValue value
 parseEnumValue =<< decodeValue value
 where
 parseEnumValue :: Maybe G.Name -> m (ScalarValue b)
@@ -169,7 +168,7 @@ parseScalarValueColumnType columnType value = case columnType of
 unless (evn `elem` enums)
 $ throw400 UnexpectedPayload
 $ "expected one of the values "
-<> dquoteList enums
+<> dquoteList (sort enums)
 <> " for type "
 <> snakeCaseTableName @b tableName
 <<> ", given "

View File

@@ -122,7 +122,7 @@ buildEndpoint schemaTypes method EndpointMetadata {..} = do
 -- We expect one optional parameter per known scalar variable.
 collectParams :: Structure -> EndpointUrl -> [Referenced Param]
 collectParams (Structure _ vars) eURL = do
-(G.unName -> varName, VariableInfo {..}) <- HashMap.toList vars
+(G.unName -> varName, VariableInfo {..}) <- sortOn fst $ HashMap.toList vars
 case _viTypeInfo of
 -- we do not allow input objects or enums in parameters
 InputFieldObjectInfo _ -> empty

View File

@@ -44,13 +44,7 @@ import Test.Hspec
 import Test.Hspec.Extended
 import Test.Parser.Field qualified as GQL
 import Test.Parser.Internal
-( ColumnInfoBuilder
-( ColumnInfoBuilder,
-cibIsPrimaryKey,
-cibName,
-cibNullable,
-cibType
-),
+( ColumnInfoBuilder (..),
 TableInfoBuilder (columns, relations),
 buildTableInfo,
 mkTable,
@@ -255,12 +249,14 @@
 { columns =
 [ ColumnInfoBuilder
 { cibName = "id",
+cibPosition = 0,
 cibType = ColumnScalar PGInteger,
 cibNullable = False,
 cibIsPrimaryKey = True
 },
 ColumnInfoBuilder
 { cibName = "title",
+cibPosition = 1,
 cibType = ColumnScalar PGText,
 cibNullable = False,
 cibIsPrimaryKey = False
@@ -277,24 +273,28 @@
 { columns =
 [ ColumnInfoBuilder
 { cibName = "id",
+cibPosition = 0,
 cibType = ColumnScalar PGInteger,
 cibNullable = False,
 cibIsPrimaryKey = True
 },
 ColumnInfoBuilder
 { cibName = "title",
+cibPosition = 1,
 cibType = ColumnScalar PGText,
 cibNullable = False,
 cibIsPrimaryKey = False
 },
 ColumnInfoBuilder
 { cibName = "duration_seconds",
+cibPosition = 2,
 cibType = ColumnScalar PGInteger,
 cibNullable = False,
 cibIsPrimaryKey = False
 },
 ColumnInfoBuilder
 { cibName = "album_id",
+cibPosition = 3,
 cibType = ColumnScalar PGInteger,
 cibNullable = False,
 cibIsPrimaryKey = False

View File

@@ -79,12 +79,13 @@
 }
 |]
 }
 describe "update many" do
 it "one update" do
 runUpdateFieldTest
 UpdateTestSetup
 { utsTable = "artist",
-utsColumns = [P.nameColumnBuilder, P.descColumnBuilder, P.idColumnBuilder],
+utsColumns = [P.idColumnBuilder, P.nameColumnBuilder, P.descColumnBuilder],
 utsExpect =
 UpdateExpectationBuilder
 { utbOutput = MOutMultirowFields [("affected_rows", MCount)],
@@ -117,7 +118,7 @@
 runUpdateFieldTest
 UpdateTestSetup
 { utsTable = "artist",
-utsColumns = [P.nameColumnBuilder, P.descColumnBuilder, P.idColumnBuilder],
+utsColumns = [P.idColumnBuilder, P.nameColumnBuilder, P.descColumnBuilder],
 utsExpect =
 UpdateExpectationBuilder
 { utbOutput = MOutMultirowFields [("affected_rows", MCount)],
@@ -157,7 +158,7 @@
 runUpdateFieldTest
 UpdateTestSetup
 { utsTable = "artist",
-utsColumns = [P.nameColumnBuilder, P.descColumnBuilder, P.idColumnBuilder],
+utsColumns = [P.idColumnBuilder, P.nameColumnBuilder, P.descColumnBuilder],
 utsExpect =
 UpdateExpectationBuilder
 { utbOutput = MOutMultirowFields [("affected_rows", MCount)],

View File

@@ -41,6 +41,7 @@ idColumnBuilder :: Expect.ColumnInfoBuilder
 idColumnBuilder =
 Expect.ColumnInfoBuilder
 { cibName = "id",
+cibPosition = 0,
 cibType = ColumnScalar PGInteger,
 cibNullable = False,
 cibIsPrimaryKey = True
@@ -50,6 +51,7 @@ nameColumnBuilder :: Expect.ColumnInfoBuilder
 nameColumnBuilder =
 Expect.ColumnInfoBuilder
 { cibName = "name",
+cibPosition = 1,
 cibType = ColumnScalar PGText,
 cibNullable = False,
 cibIsPrimaryKey = False
@@ -59,6 +61,7 @@ descColumnBuilder :: Expect.ColumnInfoBuilder
 descColumnBuilder =
 Expect.ColumnInfoBuilder
 { cibName = "description",
+cibPosition = 2,
 cibType = ColumnScalar PGText,
 cibNullable = False,
 cibIsPrimaryKey = False

View File

@@ -55,6 +55,8 @@ mkTable name =
 data ColumnInfoBuilder = ColumnInfoBuilder
 { -- | name of the column
 cibName :: Text,
+-- | column position
+cibPosition :: Int,
 -- | Column type, e.g.
 --
 -- > ColumnScalar PGText
@@ -73,7 +75,7 @@ mkColumnInfo ColumnInfoBuilder {..} =
 ColumnInfo
 { ciColumn = unsafePGCol cibName,
 ciName = unsafeMkName cibName,
-ciPosition = 0,
+ciPosition = cibPosition,
 ciType = cibType,
 ciIsNullable = cibNullable,
 ciDescription = Nothing,

View File

@@ -14,10 +14,10 @@ query:
 response:
 - field: article
 sql: "SELECT coalesce(json_agg(\"root\" ORDER BY \"root.pg.author_id\" DESC NULLS\
-\ FIRST), '[]' ) AS \"root\" FROM (SELECT \"_root.base\".\"author_id\" AS \"\
-root.pg.author_id\", row_to_json((SELECT \"_e\" FROM (SELECT \"_root.base\"\
-.\"id\" AS \"id\", \"_root.base\".\"title\" AS \"title\", \"_root.base\".\"content\"\
-\ AS \"content\" ) AS \"_e\" ) ) AS \"root\" FROM (SELECT * FROM\
+\ FIRST), '[]' ) AS \"root\" FROM (SELECT row_to_json((SELECT \"_e\" FROM\
+\ (SELECT \"_root.base\".\"id\" AS \"id\", \"_root.base\".\"title\" AS \"title\"\
+, \"_root.base\".\"content\" AS \"content\" ) AS \"_e\" ) ) AS \"root\"\
+, \"_root.base\".\"author_id\" AS \"root.pg.author_id\" FROM (SELECT * FROM\
 \ \"public\".\"article\" WHERE ('true') ORDER BY \"author_id\" DESC NULLS FIRST\
 \ LIMIT 5 ) AS \"_root.base\" ORDER BY \"root.pg.author_id\" DESC NULLS FIRST\
 \ ) AS \"_root\" "

View File

@@ -19,11 +19,11 @@
 .\"articles\" AS \"articles\" ) AS \"_e\" ) ) AS \"root\" FROM (SELECT\
 \ * FROM \"public\".\"author\" WHERE ('true') ) AS \"_root.base\" LEFT\
 \ OUTER JOIN LATERAL (SELECT coalesce(json_agg(\"articles\" ORDER BY \"root.ar.root.articles.pg.id\"\
-\ DESC NULLS FIRST), '[]' ) AS \"articles\" FROM (SELECT \"_root.ar.root.articles.base\"\
-.\"id\" AS \"root.ar.root.articles.pg.id\", row_to_json((SELECT \"_e\" FROM\
-\ (SELECT \"_root.ar.root.articles.base\".\"title\" AS \"title\" ) AS\
-\ \"_e\" ) ) AS \"articles\" FROM (SELECT * FROM \"public\".\"article\"\
-\ WHERE ((\"_root.base\".\"id\") = (\"author_id\")) ORDER BY \"id\" DESC NULLS\
-\ FIRST ) AS \"_root.ar.root.articles.base\" ORDER BY \"root.ar.root.articles.pg.id\"\
+\ DESC NULLS FIRST), '[]' ) AS \"articles\" FROM (SELECT row_to_json((SELECT\
+\ \"_e\" FROM (SELECT \"_root.ar.root.articles.base\".\"title\" AS \"title\"\
+\ ) AS \"_e\" ) ) AS \"articles\", \"_root.ar.root.articles.base\"\
+.\"id\" AS \"root.ar.root.articles.pg.id\" FROM (SELECT * FROM \"public\".\"\
+article\" WHERE ((\"_root.base\".\"id\") = (\"author_id\")) ORDER BY \"id\"\
+\ DESC NULLS FIRST ) AS \"_root.ar.root.articles.base\" ORDER BY \"root.ar.root.articles.pg.id\"\
 \ DESC NULLS FIRST ) AS \"_root.ar.root.articles\" ) AS \"_root.ar.root.articles\"\
 \ ON ('true') ) AS \"_root\" "

View File

@@ -3,12 +3,11 @@ url: /v1/graphql
 status: 200
 response:
 errors:
-- message: >-
-expected one of the values ['orange', 'yellow', 'green', 'purple', 'blue', 'red']
-for type 'colors_enum', but found 'not_a_real_color'
-extensions:
-code: validation-failed
-path: $.selectionSet.insert_users.args.objects[0].favorite_color
+- extensions:
+code: validation-failed
+path: $.selectionSet.insert_users.args.objects[0].favorite_color
+message: expected one of the values ['blue', 'green', 'orange', 'purple', 'red',
+'yellow'] for type 'colors_enum', but found 'not_a_real_color'
 query:
 query: |
 mutation {

View File

@@ -3,12 +3,11 @@ url: /v1/graphql
 status: 200
 response:
 errors:
-- extensions:
-code: validation-failed
-path: $.selectionSet.users.args.where.favorite_color._eq
-message: >-
-expected one of the values ['orange', 'yellow', 'green', 'purple', 'blue', 'red']
-for type 'colors_enum', but found 'not_a_real_color'
+- extensions:
+code: validation-failed
+path: $.selectionSet.users.args.where.favorite_color._eq
+message: expected one of the values ['blue', 'green', 'orange', 'purple', 'red',
+'yellow'] for type 'colors_enum', but found 'not_a_real_color'
 query:
 query: |
 {

View File

@@ -3,12 +3,11 @@ url: /v1/graphql
 status: 200
 response:
 errors:
-- extensions:
-code: validation-failed
-path: $.selectionSet.users.args.where.favorite_color._eq
-message: >-
-expected one of the values ['orange', 'yellow', 'green', 'purple', 'blue', 'red']
-for type 'colors_enum', but found 'not_a_real_color'
+- extensions:
+code: validation-failed
+path: $.selectionSet.users.args.where.favorite_color._eq
+message: expected one of the values ['blue', 'green', 'orange', 'purple', 'red',
+'yellow'] for type 'colors_enum', but found 'not_a_real_color'
 query:
 query: |
 query ($color: colors_enum) {

View File

@@ -45,18 +45,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -115,18 +115,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -185,18 +185,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -255,18 +255,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -325,18 +325,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:

View File

@@ -113,18 +113,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -177,8 +177,6 @@
 pattern: '[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89aAbB][a-f0-9]{3}-[a-f0-9]{12}'
 title: uuid
 type: string
-- description: Try to remove the endpoint
 url: /v1/query
 status: 200

View File

@@ -75,18 +75,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -146,18 +146,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: path
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: path
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: path
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -262,7 +262,6 @@
 pattern: '[a-f0-9]{8}-[a-f0-9]{4}-4[a-f0-9]{3}-[89aAbB][a-f0-9]{3}-[a-f0-9]{12}'
 title: uuid
 type: string
-- description: Try to remove the endpoint
 url: /v1/query
 status: 200

View File

@@ -96,18 +96,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: path
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: path
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: path
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:

View File

@@ -41,18 +41,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: query
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: query
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: query
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:
@@ -78,7 +78,6 @@
 properties:
 test_table:
 items:
-title: test_table
 description: columns and relationships of "test_table"
 nullable: false
 properties:
@@ -90,6 +89,7 @@
 nullable: true
 title: String
 type: string
+title: test_table
 type: object
 nullable: false
 type: array

View File

@@ -41,18 +41,18 @@
 name: x-hasura-admin-secret
 schema:
 type: string
-- description: _"last_name" is required (enter it either in parameters or
-request body)_
-in: path
-name: last_name
-schema:
-type: string
 - description: _"first_name" is required (enter it either in parameters
 or request body)_
 in: path
 name: first_name
 schema:
 type: string
+- description: _"last_name" is required (enter it either in parameters or
+request body)_
+in: path
+name: last_name
+schema:
+type: string
 requestBody:
 content:
 application/json:

View File

@@ -4,18 +4,15 @@ url: /v1/query
 response:
 code: unexpected
 error: >-
-Encountered conflicting definitions in the selection set for 'mutation_root'
-for fields: ['update_article' defined in [table article in source default,
-table author in source default], 'delete_article' defined in [table article
-in source default, table author in source default], 'update_article_many'
-defined in [table article in source default, table author in source default],
-'insert_article_one' defined in [table article in source default, table
-author in source default], 'update_article_by_pk' defined in [table article
-in source default, table author in source default], 'delete_article_by_pk'
-defined in [table article in source default, table author in source
-default], 'insert_article' defined in [table article in source default,
-table author in source default]]. Fields must not be defined more than once
-across all sources.
+Encountered conflicting definitions in the selection set for 'mutation_root' for fields: [
+'delete_article_by_pk' defined in [table article in source default, table author in source default],
+'delete_article' defined in [table article in source default, table author in source default],
+'update_article_many' defined in [table article in source default, table author in source default],
+'update_article_by_pk' defined in [table article in source default, table author in source default],
+'update_article' defined in [table article in source default, table author in source default],
+'insert_article_one' defined in [table article in source default, table author in source default],
+'insert_article' defined in [table article in source default, table author in source default]
+]. Fields must not be defined more than once across all sources.
 path: $.args
 query:
 type: set_table_customization