Mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-14 17:02:49 +03:00)

commit d8d21c1487 (parent d82c6d464f)
@@ -118,9 +118,7 @@ const initQueries = {
        function_schema: '',
        has_variadic: false,
        returns_set: true,
        return_type_type: {
          $ilike: '%composite%',
        },
        return_type_type: 'c', // COMPOSITE type
        $or: [
          {
            function_type: {
@@ -159,9 +157,7 @@ const initQueries = {
        function_schema: '',
        has_variadic: false,
        returns_set: true,
        return_type_type: {
          $ilike: '%composite%',
        },
        return_type_type: 'c', // COMPOSITE type
        function_type: {
          $ilike: '%volatile%',
        },

@@ -174,19 +174,24 @@ const deleteFunctionSql = () => {
    functionDefinition,
    inputArgTypes,
  } = getState().functions;
  let functionWSchemaName =
  const functionNameWithSchema =
    '"' + currentSchema + '"' + '.' + '"' + functionName + '"';

  let functionArgString = '';
  if (inputArgTypes.length > 0) {
    let functionString = '(';
    inputArgTypes.forEach((i, index) => {
      functionString +=
        i + ' ' + (index === inputArgTypes.length - 1 ? ')' : ',');
    functionArgString += '(';
    inputArgTypes.forEach((inputArg, i) => {
      functionArgString += i > 0 ? ', ' : '';

      functionArgString +=
        '"' + inputArg.schema + '"' + '.' + '"' + inputArg.name + '"';
    });
    functionWSchemaName += functionString;
    functionArgString += ')';
  }

  const sqlDropFunction = 'DROP FUNCTION ' + functionWSchemaName;
  const sqlDropFunction =
    'DROP FUNCTION ' + functionNameWithSchema + functionArgString;

  const sqlUpQueries = [
    {
@@ -194,6 +199,7 @@ const deleteFunctionSql = () => {
      args: { sql: sqlDropFunction },
    },
  ];

  const sqlDownQueries = [];
  if (functionDefinition && functionDefinition.length > 0) {
    sqlDownQueries.push({
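For context, the reworked ``deleteFunctionSql`` builder schema-qualifies both the function name and each input argument type, so the generated statement resolves a specific overload. A hypothetical example of the resulting SQL (the function and argument types are illustrative, not taken from this commit):

.. code-block:: plpgsql

   -- drops exactly the overload whose signature matches the listed argument types
   DROP FUNCTION "public"."search_articles"("pg_catalog"."text", "pg_catalog"."int4");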
@@ -0,0 +1,156 @@
Schema/Metadata API Reference: Computed Fields
==============================================

.. contents:: Table of contents
   :backlinks: none
   :depth: 1
   :local:

A **computed field** is an extra field added to a table; its value is
computed via an SQL function that takes the table row type as an input argument.
Currently, the Hasura GraphQL engine supports functions returning
`base types <https://www.postgresql.org/docs/current/extend-type-system.html#id-1.8.3.5.9>`__ or
`table row types <https://www.postgresql.org/docs/current/rowtypes.html#ROWTYPES-DECLARING>`__
as computed fields.

.. _add_computed_field:

add_computed_field
------------------

``add_computed_field`` is used to define a computed field in a table.
There cannot be an existing column, relationship, or computed field with
the same name.

Create a computed field called ``get_articles`` on the ``author`` *table*, using
an SQL function called ``fetch_articles``:

.. code-block:: http

   POST /v1/query HTTP/1.1
   Content-Type: application/json
   X-Hasura-Role: admin

   {
       "type":"add_computed_field",
       "args":{
           "table":{
               "name":"author",
               "schema":"public"
           },
           "name":"get_articles",
           "definition":{
               "function":{
                   "name":"fetch_articles",
                   "schema":"public"
               },
               "table_argument":"author_row"
           }
       }
   }

.. _add_computed_field_syntax:

Args syntax
^^^^^^^^^^^

.. list-table::
   :header-rows: 1

   * - Key
     - Required
     - Schema
     - Description
   * - table
     - true
     - :ref:`TableName <TableName>`
     - Name of the table
   * - name
     - true
     - :ref:`ComputedFieldName <ComputedFieldName>`
     - Name of the new computed field
   * - definition
     - true
     - ComputedFieldDefinition_
     - The computed field definition
   * - comment
     - false
     - text
     - Comment for the computed field

.. _ComputedFieldDefinition:

ComputedFieldDefinition
&&&&&&&&&&&&&&&&&&&&&&&

.. list-table::
   :header-rows: 1

   * - Key
     - Required
     - Schema
     - Description
   * - function
     - true
     - :ref:`FunctionName <FunctionName>`
     - The SQL function
   * - table_argument
     - false
     - String
     - Name of the argument which accepts a table row type. If omitted, the first
       argument is considered the table argument

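For illustration, ``table_argument`` names the input argument of the SQL function that carries the table's row type. A function matching the example above (the same shape as the ``fetch_articles`` function used in the schema docs added by this commit) could look roughly like this:

.. code-block:: plpgsql

   -- "author_row" is the table argument named in the add_computed_field call above
   CREATE FUNCTION fetch_articles(search text, author_row author)
   RETURNS SETOF article AS $$
     SELECT * FROM article
     WHERE author_id = author_row.id
       AND title ilike ('%' || search || '%')
   $$ LANGUAGE sql STABLE;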
.. _drop_computed_field:

drop_computed_field
-------------------

``drop_computed_field`` is used to drop a computed field of a table. If
there are other objects dependent on this computed field (like permissions), the request will fail and
report the dependencies unless ``cascade`` is set to ``true``, in which case the dependent objects
are also dropped.

Drop the computed field ``get_articles`` from the ``author`` table:

.. code-block:: http

   POST /v1/query HTTP/1.1
   Content-Type: application/json
   X-Hasura-Role: admin

   {
       "type":"drop_computed_field",
       "args":{
           "table":{
               "name":"author",
               "schema":"public"
           },
           "name":"get_articles",
           "cascade": false
       }
   }

.. _drop_computed_field_syntax:

Args syntax
^^^^^^^^^^^

.. list-table::
   :header-rows: 1

   * - Key
     - Required
     - Schema
     - Description
   * - table
     - true
     - :ref:`TableName <TableName>`
     - Name of the table
   * - name
     - true
     - :ref:`ComputedFieldName <ComputedFieldName>`
     - Name of the computed field
   * - cascade
     - false
     - Boolean
     - When set to ``true``, all the dependent items (if any) on this computed field are also dropped
@@ -137,6 +137,16 @@ The various types of queries are listed in the following table:
     - 1
     - Set comment on an existing relationship

   * - :ref:`add_computed_field`
     - :ref:`add_computed_field_args <add_computed_field_syntax>`
     - 1
     - Add a computed field

   * - :ref:`drop_computed_field`
     - :ref:`drop_computed_field_args <drop_computed_field_syntax>`
     - 1
     - Drop a computed field

   * - :ref:`create_insert_permission`
     - :ref:`create_insert_permission_args <create_insert_permission_syntax>`
     - 1
@@ -278,6 +288,7 @@ The various types of queries are listed in the following table:
   - :doc:`Tables/Views <table-view>`
   - :doc:`Custom SQL Functions <custom-functions>`
   - :doc:`Relationships <relationship>`
   - :doc:`Computed Fields <computed-field>`
   - :doc:`Permissions <permission>`
   - :doc:`Event Triggers <event-triggers>`
   - :doc:`Remote Schemas <remote-schemas>`
@@ -362,6 +373,7 @@ See :doc:`../../deployment/graphql-engine-flags/reference` for info on setting t
   Custom Functions <custom-functions>
   Relationships <relationship>
   Permissions <permission>
   Computed Fields <computed-field>
   Event Triggers <event-triggers>
   Remote Schemas <remote-schemas>
   Query Collections <query-collections>
@@ -9,7 +9,7 @@ Schema/Metadata API Reference: Permissions

The permission layer is designed to restrict the operations that can be
performed by various users. Permissions can be defined on various operations
(insert/select/update/delete) at a role level granularity. By default, the ``admin``
role has unrestricted access to all operations.

.. admonition:: Variables in rules

@@ -23,7 +23,7 @@ create_insert_permission
------------------------

An insert permission is used to enforce constraints on the data that is being
inserted.

Let's look at an example, a permission for the ``user`` role to insert into the
``article`` table. What is the constraint that we would like to enforce here? *A
@@ -61,7 +61,7 @@ This reads as follows - for the ``user`` role:
* When this insert happens, the value of the column ``id`` will be automatically ``set`` to the value of the resolved session variable ``X-HASURA-USER-ID``.


The argument for ``check`` is a boolean expression which has the same syntax as the ``where`` clause in the ``select`` query, making it extremely expressive. For example,

.. code-block:: http

@@ -93,7 +93,7 @@ The argument for ``check`` is a boolean expression which has the same syntax as

In the above definition, the row is allowed to be inserted if the ``author_id``
is the same as the request's user id and ``is_reviewed`` is ``false`` when the
``category`` is "editorial".

.. _create_insert_permission_syntax:

@@ -191,7 +191,7 @@ A select permission is used to restrict access to only the specified columns and

Let's look at an example, a permission for the ``user`` role to select from the
``article`` table: all columns can be read, as well as the rows that have been published or
authored by the user themselves.

.. code-block:: http

@@ -273,6 +273,10 @@ SelectPermission
     - true
     - :ref:`PGColumn` array (or) ``'*'``
     - Only these columns are selectable (or all when ``'*'`` is specified)
   * - computed_fields
     - false
     - :ref:`ComputedFieldName` array
     - Only these computed fields are selectable
   * - filter
     - true
     - :ref:`BoolExp`
@@ -321,7 +325,7 @@ create_update_permission
------------------------

An update permission is used to restrict the columns and rows that can be
updated. Its structure is quite similar to the select permission.

An example:

@@ -361,7 +365,7 @@ This reads as follows - for the ``user`` role:
It is important to deny updates to columns that will determine the row
ownership. In the above example, the ``author_id`` column determines the
ownership of a row in the ``article`` table. Columns such as this should
never be allowed to be updated.

.. _create_update_permission_syntax:

@@ -416,7 +420,7 @@ UpdatePermission
     - false
     - :ref:`ColumnPresetExp`
     - Preset values for columns that can be sourced from session variables or static values.


.. _drop_update_permission:

@@ -559,7 +563,7 @@ set_permission_comment
----------------------

``set_permission_comment`` is used to set/update the comment on a permission.
Setting the comment to ``null`` removes it.

An example:
@@ -54,6 +54,15 @@ QualifiedFunction
RoleName
^^^^^^^^

.. parsed-literal::

   String

.. _ComputedFieldName:

ComputedFieldName
^^^^^^^^^^^^^^^^^

.. parsed-literal::

   String
@@ -62,4 +62,3 @@ based on a typical author/article schema for reference.

   derived-data
   control-access
   variables-aliases-fragments-directives
158  docs/graphql/manual/schema/computed-fields.rst  Normal file
@@ -0,0 +1,158 @@
Computed fields
===============

.. contents:: Table of contents
   :backlinks: none
   :depth: 2
   :local:

What are computed fields?
-------------------------

Computed fields are virtual values or objects that are dynamically computed and can be queried along with a table's columns.
They are computed on request via SQL functions, using other columns of the table and other custom inputs if needed.

.. note::

   Computed fields are only exposed over the GraphQL API; the database schema is not modified when a computed field is added.

Supported SQL functions
***********************

Only functions which satisfy the following constraints can be added as a computed field to a table
(*terminology from the* `Postgres docs <https://www.postgresql.org/docs/current/sql-createfunction.html>`__):

- **Function behaviour**: ONLY ``STABLE`` or ``IMMUTABLE``
- **Argument modes**: ONLY ``IN``
- **Table argument**: One input argument with a table row type
- **Return type**: Either ``SETOF <table-name>`` or a ``BASE`` type
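One way to check whether an existing function meets these constraints is to inspect the Postgres catalog directly. This is an illustrative query, not part of the official docs; the function name ``fetch_articles`` is just an example:

.. code-block:: plpgsql

   -- provolatile: 'v' = VOLATILE (not allowed), 's'/'i' = STABLE/IMMUTABLE (allowed)
   -- proretset:   true when the function returns SETOF
   -- proargmodes: NULL means all arguments are IN arguments
   SELECT proname, provolatile, proretset, proargmodes
   FROM pg_proc
   WHERE proname = 'fetch_articles';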
Defining computed fields
------------------------

Based on the SQL function's return type, we can define two types of computed fields:

1. Scalar computed fields
*************************

Computed fields whose associated SQL function returns a
`base type <https://www.postgresql.org/docs/current/extend-type-system.html#id-1.8.3.5.9>`__ like *Integer*,
*Boolean*, *Geography* etc. are scalar computed fields.

**Example:**

The ``author`` table has two ``text`` columns: ``first_name`` and ``last_name``.

Define an SQL function called ``author_full_name``:

.. code-block:: plpgsql

   CREATE FUNCTION author_full_name(author_row author)
   RETURNS TEXT AS $$
     SELECT author_row.first_name || ' ' || author_row.last_name
   $$ LANGUAGE sql STABLE;

Add a computed field called ``full_name`` to the ``author`` table using the SQL function above.
See the :doc:`API Reference <../api-reference/schema-metadata-api/computed-field>`.

Query data from the ``author`` table:

.. graphiql::
  :view_only:
  :query:
    query {
      author {
        id
        first_name
        last_name
        full_name
      }
    }
  :response:
    {
      "data": {
        "author": [
          {
            "id": 1,
            "first_name": "Chris",
            "last_name": "Raichael",
            "full_name": "Chris Raichael"
          }
        ]
      }
    }
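Under the hood, a scalar computed field is resolved by calling the function with the parent table's row. Conceptually (an illustration, not necessarily the exact SQL the engine generates), the query above corresponds to:

.. code-block:: plpgsql

   -- the whole row "a" is passed as the function's table argument
   SELECT a.id, a.first_name, a.last_name, author_full_name(a) AS full_name
   FROM author a;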
2. Table computed fields
************************

Computed fields whose associated SQL function returns ``SETOF <table-name>`` are table computed fields.
The return table must be tracked to define such a computed field.

**Example:**

In a simple ``author <-> article`` schema, we can define a :doc:`relationship <../schema/relationships/index>` on the ``author``
table to fetch authors along with their articles. We can instead use a computed field to fetch an author's articles
filtered by a search term.

Define an SQL function called ``fetch_articles``:

.. code-block:: plpgsql

   CREATE FUNCTION fetch_articles(search text, author_row author)
   RETURNS SETOF article AS $$
     SELECT *
     FROM article
     WHERE
       ( title ilike ('%' || search || '%')
         OR content ilike ('%' || search || '%')
       ) AND author_id = author_row.id
   $$ LANGUAGE sql STABLE;

Add a computed field called ``get_articles`` to the ``author`` table using the SQL function above.
See the :doc:`API Reference <../api-reference/schema-metadata-api/computed-field>`.

Query data from the ``author`` table:

.. graphiql::
  :view_only:
  :query:
    query {
      author {
        id
        first_name
        last_name
        get_articles(args: {search: "Hasura"}){
          id
          title
          content
        }
      }
    }
  :response:
    {
      "data": {
        "author": [
          {
            "id": 1,
            "first_name": "Chris",
            "last_name": "Raichael",
            "get_articles": [
              {
                "id": 1,
                "title": "Computed fields in Hasura",
                "content": "lorem ipsum dolor sit amet"
              }
            ]
          }
        ]
      }
    }
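Again conceptually (an illustration rather than the engine's exact SQL), fetching ``get_articles`` for an author corresponds to a lateral call of the function with that author's row:

.. code-block:: plpgsql

   -- each author row is passed to fetch_articles together with the search argument
   SELECT art.id, art.title, art.content
   FROM author a
   CROSS JOIN LATERAL fetch_articles('Hasura', a) AS art
   WHERE a.id = 1;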
Computed fields vs. Postgres generated columns
----------------------------------------------

Postgres, from version ``12``, introduces `generated columns <https://www.postgresql.org/docs/12/ddl-generated-columns.html>`__,
whose values are also computed from other columns of a table, but which come with their own limitations.
Hasura's computed fields are defined via an SQL function, which lets users express arbitrarily complex business logic.
Generated columns will work alongside computed fields: Hasura treats generated columns as normal Postgres columns.
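For comparison, a minimal generated-column example (Postgres 12+ syntax; the column and expression are illustrative and mirror the ``full_name`` computed field above):

.. code-block:: plpgsql

   -- a stored generated column: computed by Postgres on write and stored in the table
   ALTER TABLE author
     ADD COLUMN full_name text
     GENERATED ALWAYS AS (first_name || ' ' || last_name) STORED;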
@@ -27,6 +27,7 @@ Postgres constructs.
   Customise with views <views>
   Default field values <default-values/index>
   Enum type fields <enums>
   computed-fields
   Using an existing database <using-existing-database>
   Setting up soft deletes <soft-deletes>
   Export GraphQL schema <export-graphql-schema>
@ -180,6 +180,7 @@ library
|
||||
, Hasura.RQL.Types.Catalog
|
||||
, Hasura.RQL.Types.Column
|
||||
, Hasura.RQL.Types.Common
|
||||
, Hasura.RQL.Types.ComputedField
|
||||
, Hasura.RQL.Types.DML
|
||||
, Hasura.RQL.Types.Error
|
||||
, Hasura.RQL.Types.EventTrigger
|
||||
@ -187,11 +188,12 @@ library
|
||||
, Hasura.RQL.Types.Permission
|
||||
, Hasura.RQL.Types.QueryCollection
|
||||
, Hasura.RQL.Types.RemoteSchema
|
||||
, Hasura.RQL.DDL.ComputedField
|
||||
, Hasura.RQL.DDL.Relationship
|
||||
, Hasura.RQL.DDL.Deps
|
||||
, Hasura.RQL.DDL.Permission.Internal
|
||||
, Hasura.RQL.DDL.Permission.Triggers
|
||||
, Hasura.RQL.DDL.Permission
|
||||
, Hasura.RQL.DDL.Relationship
|
||||
, Hasura.RQL.DDL.Relationship.Rename
|
||||
, Hasura.RQL.DDL.Relationship.Types
|
||||
, Hasura.RQL.DDL.Schema
|
||||
|
@@ -12,7 +12,7 @@ import Hasura.Prelude
import qualified Data.Text as T

latestCatalogVersion :: Integer
latestCatalogVersion = 25
latestCatalogVersion = 26

latestCatalogVersionString :: T.Text
latestCatalogVersionString = T.pack $ show latestCatalogVersion
@ -143,7 +143,7 @@ parseCastExpression
|
||||
parseCastExpression =
|
||||
withObjectM $ \_ objM -> forM objM $ \obj -> do
|
||||
targetExps <- forM (OMap.toList obj) $ \(targetTypeName, castedComparisonExpressionInput) -> do
|
||||
let targetType = txtToPgColTy $ G.unName targetTypeName
|
||||
let targetType = textToPGScalarType $ G.unName targetTypeName
|
||||
castedComparisonExpressions <- parseOpExps (PGColumnScalar targetType) castedComparisonExpressionInput
|
||||
return (targetType, castedComparisonExpressions)
|
||||
return $ Map.fromList targetExps
|
||||
@ -159,13 +159,15 @@ parseColExp
|
||||
parseColExp nt n val = do
|
||||
fldInfo <- getFldInfo nt n
|
||||
case fldInfo of
|
||||
Left pgColInfo -> do
|
||||
RFPGColumn pgColInfo -> do
|
||||
opExps <- parseOpExps (pgiType pgColInfo) val
|
||||
return $ AVCol pgColInfo opExps
|
||||
Right (RelationshipField relInfo _ _ permExp _)-> do
|
||||
RFRelationship (RelationshipField relInfo _ _ permExp _)-> do
|
||||
relBoolExp <- parseBoolExp val
|
||||
return $ AVRel relInfo $ andAnnBoolExps relBoolExp $
|
||||
fmapAnnBoolExp partialSQLExpToUnresolvedVal permExp
|
||||
RFComputedField _ -> throw500
|
||||
"computed fields are not allowed in bool_exp"
|
||||
|
||||
parseBoolExp
|
||||
:: ( MonadReusability m
|
||||
|
@ -53,7 +53,7 @@ import qualified Hasura.SQL.DML as S
|
||||
getFldInfo
|
||||
:: (MonadError QErr m, MonadReader r m, Has FieldMap r)
|
||||
=> G.NamedType -> G.Name
|
||||
-> m (Either PGColumnInfo RelationshipField)
|
||||
-> m ResolveField
|
||||
getFldInfo nt n = do
|
||||
fldMap <- asks getter
|
||||
onNothing (Map.lookup (nt,n) fldMap) $
|
||||
@ -66,9 +66,12 @@ getPGColInfo
|
||||
getPGColInfo nt n = do
|
||||
fldInfo <- getFldInfo nt n
|
||||
case fldInfo of
|
||||
Left pgColInfo -> return pgColInfo
|
||||
Right _ -> throw500 $
|
||||
"found relinfo when expecting pgcolinfo for "
|
||||
RFPGColumn pgColInfo -> return pgColInfo
|
||||
RFRelationship _ -> throw500 $ mkErrMsg "relation"
|
||||
RFComputedField _ -> throw500 $ mkErrMsg "computed field"
|
||||
where
|
||||
mkErrMsg ty =
|
||||
"found " <> ty <> " when expecting pgcolinfo for "
|
||||
<> showNamedTy nt <> ":" <> showName n
|
||||
|
||||
getArg
|
||||
|
@ -55,6 +55,34 @@ argsToColOp args = maybe (return Nothing) toOp $ Map.lookup "path" args
|
||||
|
||||
type AnnFlds = RS.AnnFldsG UnresolvedVal
|
||||
|
||||
resolveComputedField
|
||||
:: ( MonadReusability m, MonadReader r m, Has FieldMap r
|
||||
, Has OrdByCtx r, Has SQLGenCtx r, MonadError QErr m
|
||||
)
|
||||
=> ComputedField -> Field -> m (RS.ComputedFieldSel UnresolvedVal)
|
||||
resolveComputedField computedField fld = fieldAsPath fld $ do
|
||||
funcArgsM <- withArgM (_fArguments fld) "args" $ parseFunctionArgs argSeq
|
||||
let funcArgs = fromMaybe RS.emptyFunctionArgsExp funcArgsM
|
||||
argsWithTableArgument = withTableArgument funcArgs
|
||||
case fieldType of
|
||||
CFTScalar scalarTy -> do
|
||||
colOpM <- argsToColOp $ _fArguments fld
|
||||
pure $ RS.CFSScalar $
|
||||
RS.ComputedFieldScalarSel qf argsWithTableArgument scalarTy colOpM
|
||||
CFTTable (ComputedFieldTable _ cols permFilter permLimit) -> do
|
||||
let functionFrom = RS.FromFunction qf argsWithTableArgument
|
||||
RS.CFSTable <$> fromField functionFrom cols permFilter permLimit fld
|
||||
where
|
||||
ComputedField _ function argSeq fieldType = computedField
|
||||
ComputedFieldFunction qf _ tableArg _ = function
|
||||
withTableArgument resolvedArgs =
|
||||
let argsExp@(RS.FunctionArgsExp positional named) = RS.AEInput <$> resolvedArgs
|
||||
in case tableArg of
|
||||
FTAFirst ->
|
||||
RS.FunctionArgsExp (RS.AETableRow:positional) named
|
||||
FTANamed argName index ->
|
||||
RS.insertFunctionArg argName index RS.AETableRow argsExp
|
||||
|
||||
fromSelSet
|
||||
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
|
||||
, Has OrdByCtx r, Has SQLGenCtx r
|
||||
@ -69,9 +97,11 @@ fromSelSet fldTy flds =
|
||||
_ -> do
|
||||
fldInfo <- getFldInfo fldTy fldName
|
||||
case fldInfo of
|
||||
Left colInfo ->
|
||||
RFPGColumn colInfo ->
|
||||
RS.FCol colInfo <$> argsToColOp (_fArguments fld)
|
||||
Right (RelationshipField relInfo isAgg colGNameMap tableFilter tableLimit) -> do
|
||||
RFComputedField computedField ->
|
||||
RS.FComputedField <$> resolveComputedField computedField fld
|
||||
RFRelationship (RelationshipField relInfo isAgg colGNameMap tableFilter tableLimit) -> do
|
||||
let relTN = riRTable relInfo
|
||||
colMapping = riMapping relInfo
|
||||
rn = riName relInfo
|
||||
@ -79,7 +109,7 @@ fromSelSet fldTy flds =
|
||||
aggSel <- fromAggField relTN colGNameMap tableFilter tableLimit fld
|
||||
return $ RS.FArr $ RS.ASAgg $ RS.AnnRelG rn colMapping aggSel
|
||||
else do
|
||||
annSel <- fromField relTN colGNameMap tableFilter tableLimit fld
|
||||
annSel <- fromField (RS.FromTable relTN) colGNameMap tableFilter tableLimit fld
|
||||
let annRel = RS.AnnRelG rn colMapping annSel
|
||||
return $ case riType relInfo of
|
||||
ObjRel -> RS.FObj annRel
|
||||
@ -140,19 +170,18 @@ fromField
|
||||
:: ( MonadReusability m, MonadError QErr m, MonadReader r m, Has FieldMap r
|
||||
, Has OrdByCtx r, Has SQLGenCtx r
|
||||
)
|
||||
=> QualifiedTable
|
||||
=> RS.SelectFromG UnresolvedVal
|
||||
-> PGColGNameMap
|
||||
-> AnnBoolExpPartialSQL
|
||||
-> Maybe Int
|
||||
-> Field -> m AnnSimpleSelect
|
||||
fromField tn colGNameMap permFilter permLimitM fld = fieldAsPath fld $ do
|
||||
fromField selFrom colGNameMap permFilter permLimitM fld = fieldAsPath fld $ do
|
||||
tableArgs <- parseTableArgs colGNameMap args
|
||||
annFlds <- fromSelSet (_fType fld) $ _fSelSet fld
|
||||
let unresolvedPermFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
|
||||
let tabFrom = RS.TableFrom tn Nothing
|
||||
tabPerm = RS.TablePerm unresolvedPermFltr permLimitM
|
||||
let tabPerm = RS.TablePerm unresolvedPermFltr permLimitM
|
||||
strfyNum <- stringifyNum <$> asks getter
|
||||
return $ RS.AnnSelG annFlds tabFrom tabPerm tableArgs strfyNum
|
||||
return $ RS.AnnSelG annFlds selFrom tabPerm tableArgs strfyNum
|
||||
where
|
||||
args = _fArguments fld
|
||||
|
||||
@ -300,7 +329,7 @@ fromFieldByPKey
|
||||
fromFieldByPKey tn colArgMap permFilter fld = fieldAsPath fld $ do
|
||||
boolExp <- pgColValToBoolExp colArgMap $ _fArguments fld
|
||||
annFlds <- fromSelSet fldTy $ _fSelSet fld
|
||||
let tabFrom = RS.TableFrom tn Nothing
|
||||
let tabFrom = RS.FromTable tn
|
||||
unresolvedPermFltr = fmapAnnBoolExp partialSQLExpToUnresolvedVal
|
||||
permFilter
|
||||
tabPerm = RS.TablePerm unresolvedPermFltr Nothing
|
||||
@ -317,7 +346,7 @@ convertSelect
|
||||
=> SelOpCtx -> Field -> m QueryRootFldUnresolved
|
||||
convertSelect opCtx fld =
|
||||
withPathK "selectionSet" $ QRFSimple <$>
|
||||
fromField qt colGNameMap permFilter permLimit fld
|
||||
fromField (RS.FromTable qt) colGNameMap permFilter permLimit fld
|
||||
where
|
||||
SelOpCtx qt _ colGNameMap permFilter permLimit = opCtx
|
||||
|
||||
@ -402,7 +431,7 @@ fromAggField tn colGNameMap permFilter permLimit fld = fieldAsPath fld $ do
|
||||
aggSelFlds <- fromAggSelSet colGNameMap (_fType fld) (_fSelSet fld)
|
||||
let unresolvedPermFltr =
|
||||
fmapAnnBoolExp partialSQLExpToUnresolvedVal permFilter
|
||||
let tabFrom = RS.TableFrom tn Nothing
|
||||
let tabFrom = RS.FromTable tn
|
||||
tabPerm = RS.TablePerm unresolvedPermFltr permLimit
|
||||
strfyNum <- stringifyNum <$> asks getter
|
||||
return $ RS.AnnSelG aggSelFlds tabFrom tabPerm tableArgs strfyNum
|
||||
@ -468,7 +497,7 @@ convertFuncQuerySimple
|
||||
=> FuncQOpCtx -> Field -> m QueryRootFldUnresolved
|
||||
convertFuncQuerySimple funcOpCtx fld =
|
||||
withPathK "selectionSet" $ QRFFnSimple <$>
|
||||
fromFuncQueryField (fromField qt colGNameMap permFilter permLimit) qf argSeq fld
|
||||
fromFuncQueryField (fromField (RS.FromTable qt) colGNameMap permFilter permLimit) qf argSeq fld
|
||||
where
|
||||
FuncQOpCtx qt _ colGNameMap permFilter permLimit qf argSeq = funcOpCtx
|
||||
|
||||
|
@ -4,22 +4,24 @@ module Hasura.GraphQL.Resolve.Types
|
||||
, MonadReusability(..)
|
||||
) where
|
||||
|
||||
import Control.Lens.TH
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Data.HashMap.Strict as Map
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.Text as T
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
import qualified Data.HashMap.Strict as Map
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.Text as T
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
import Hasura.GraphQL.Validate.Types
|
||||
import Hasura.RQL.Types.BoolExp
|
||||
import Hasura.RQL.Types.Column
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.ComputedField
|
||||
import Hasura.RQL.Types.Permission
|
||||
import Hasura.SQL.Types
|
||||
import Hasura.SQL.Value
|
||||
|
||||
import qualified Hasura.SQL.DML as S
|
||||
import qualified Hasura.SQL.DML as S
|
||||
|
||||
data QueryCtx
|
||||
= QCSelect !SelOpCtx
|
||||
@ -114,9 +116,34 @@ data RelationshipField
|
||||
, _rfPermLimit :: !(Maybe Int)
|
||||
} deriving (Show, Eq)
|
||||
|
||||
type FieldMap =
|
||||
Map.HashMap (G.NamedType, G.Name)
|
||||
(Either PGColumnInfo RelationshipField)
|
||||
data ComputedFieldTable
|
||||
= ComputedFieldTable
|
||||
{ _cftTable :: !QualifiedTable
|
||||
, _cftCols :: !PGColGNameMap
|
||||
, _cftPermFilter :: !AnnBoolExpPartialSQL
|
||||
, _cftPermLimit :: !(Maybe Int)
|
||||
} deriving (Show, Eq)
|
||||
|
||||
data ComputedFieldType
|
||||
= CFTScalar !PGScalarType
|
||||
| CFTTable !ComputedFieldTable
|
||||
deriving (Show, Eq)
|
||||
|
||||
data ComputedField
|
||||
= ComputedField
|
||||
{ _cfName :: !ComputedFieldName
|
||||
, _cfFunction :: !ComputedFieldFunction
|
||||
, _cfArgSeq :: !FuncArgSeq
|
||||
, _cfType :: !ComputedFieldType
|
||||
} deriving (Show, Eq)
|
||||
|
||||
data ResolveField
|
||||
= RFPGColumn !PGColumnInfo
|
||||
| RFRelationship !RelationshipField
|
||||
| RFComputedField !ComputedField
|
||||
deriving (Show, Eq)
|
||||
|
||||
type FieldMap = Map.HashMap (G.NamedType, G.Name) ResolveField
|
||||
|
||||
-- order by context
|
||||
data OrdByItem
|
||||
@ -186,3 +213,6 @@ data UnresolvedVal
|
||||
deriving (Show, Eq)
|
||||
|
||||
type AnnBoolExpUnresolved = AnnBoolExp UnresolvedVal
|
||||
|
||||
-- template haskell related
|
||||
$(makePrisms ''ResolveField)
|
||||
|
@ -75,12 +75,10 @@ upsertable uniqueOrPrimaryCons isUpsertAllowed isAView =
|
||||
|
||||
getValidCols
|
||||
:: FieldInfoMap PGColumnInfo -> [PGColumnInfo]
|
||||
getValidCols fim = filter isValidCol cols
|
||||
where
|
||||
cols = fst $ partitionFieldInfos $ Map.elems fim
|
||||
getValidCols = filter isValidCol . getCols
|
||||
|
||||
getValidRels :: FieldInfoMap PGColumnInfo -> [RelInfo]
|
||||
getValidRels = filter isValidRel' . snd . partitionFieldInfos . Map.elems
|
||||
getValidRels = filter isValidRel' . getRels
|
||||
where
|
||||
isValidRel' (RelInfo rn _ _ remTab _) = isValidRel rn remTab
|
||||
|
||||
@ -112,6 +110,11 @@ compAggOps = ["max", "min"]
|
||||
isAggFld :: G.Name -> Bool
|
||||
isAggFld = flip elem (numAggOps <> compAggOps)
|
||||
|
||||
mkFuncArgSeq :: Seq.Seq FunctionArg -> Seq.Seq FuncArgItem
|
||||
mkFuncArgSeq inputArgs =
|
||||
Seq.fromList $ procFuncArgs inputArgs $
|
||||
\fa t -> FuncArgItem (G.Name t) (faName fa) (faHasDefault fa)
|
||||
|
||||
mkGCtxRole'
|
||||
:: QualifiedTable
|
||||
-> Maybe PGDescription
|
||||
@ -149,7 +152,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
|
||||
allTypes = relInsInpObjTys <> onConflictTypes <> jsonOpTys
|
||||
<> queryTypes <> aggQueryTypes <> mutationTypes
|
||||
<> funcInpArgTys <> referencedEnumTypes
|
||||
<> funcInpArgTys <> referencedEnumTypes <> computedColFuncArgsInps
|
||||
|
||||
queryTypes = catMaybes
|
||||
[ TIInpObj <$> boolExpInpObjM
|
||||
@ -177,7 +180,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
|
||||
-- helper
|
||||
mkColFldMap ty cols = Map.fromList $ flip map cols $
|
||||
\ci -> ((ty, pgiName ci), Left ci)
|
||||
\ci -> ((ty, pgiName ci), RFPGColumn ci)
|
||||
|
||||
-- insert input type
|
||||
insInpObjM = uncurry (mkInsInp tn) <$> insPermM
|
||||
@ -196,7 +199,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
updSetInpObjFldsM = mkColFldMap (mkUpdSetTy tn) <$> updColsM
|
||||
|
||||
selFldsM = snd <$> selPermM
|
||||
selColNamesM = (map pgiName . lefts) <$> selFldsM
|
||||
selColNamesM = (map pgiName . getPGColumnFields) <$> selFldsM
|
||||
selColInpTyM = mkSelColumnTy tn <$> selColNamesM
|
||||
-- boolexp input type
|
||||
boolExpInpObjM = case selFldsM of
|
||||
@ -209,25 +212,30 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
else Nothing
|
||||
|
||||
-- funcargs input type
|
||||
funcArgInpObjs = mapMaybe mkFuncArgsInp funcs
|
||||
funcArgInpObjs = flip mapMaybe funcs $ \func ->
|
||||
mkFuncArgsInp (fiName func) (fiInputArgs func)
|
||||
-- funcArgCtx = Map.unions funcArgCtxs
|
||||
funcArgScalarSet = funcs ^.. folded.to fiInputArgs.folded.to faType
|
||||
funcArgScalarSet = funcs ^.. folded.to fiInputArgs.folded.to (_qptName.faType)
|
||||
|
||||
-- helper
|
||||
mkFldMap ty = Map.fromList . concatMap (mkFld ty)
|
||||
mkFld ty = \case
|
||||
Left ci -> [((ty, pgiName ci), Left ci)]
|
||||
Right (RelationshipFieldInfo relInfo allowAgg cols permFilter permLimit _) ->
|
||||
SFPGColumn ci -> [((ty, pgiName ci), RFPGColumn ci)]
|
||||
SFRelationship (RelationshipFieldInfo relInfo allowAgg cols permFilter permLimit _) ->
|
||||
let relationshipName = riName relInfo
|
||||
relFld = ( (ty, mkRelName relationshipName)
|
||||
, Right $ RelationshipField relInfo False cols permFilter permLimit
|
||||
, RFRelationship $ RelationshipField relInfo False cols permFilter permLimit
|
||||
)
|
||||
aggRelFld = ( (ty, mkAggRelName relationshipName)
|
||||
, Right $ RelationshipField relInfo True cols permFilter permLimit
|
||||
, RFRelationship $ RelationshipField relInfo True cols permFilter permLimit
|
||||
)
|
||||
in case riType relInfo of
|
||||
ObjRel -> [relFld]
|
||||
ArrRel -> bool [relFld] [relFld, aggRelFld] allowAgg
|
||||
SFComputedField cf -> pure
|
||||
( (ty, mkComputedFieldName $ _cfName cf)
|
||||
, RFComputedField cf
|
||||
)
|
||||
|
||||
-- the fields used in bool exp
|
||||
boolExpInpObjFldsM = mkFldMap (mkBoolExpTy tn) <$> selFldsM
|
||||
@ -247,7 +255,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
-- aggregate objs and order by inputs
|
||||
(aggObjs, aggOrdByInps) = case selPermM of
|
||||
Just (True, selFlds) ->
|
||||
let cols = lefts selFlds
|
||||
let cols = getPGColumnFields selFlds
|
||||
numCols = onlyNumCols cols
|
||||
compCols = onlyComparableCols cols
|
||||
objs = [ mkTableAggObj tn
|
||||
@ -258,16 +266,16 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
in (objs, ordByInps)
|
||||
_ -> ([], [])
|
||||
|
||||
getNumCols = onlyNumCols . lefts
|
||||
getCompCols = onlyComparableCols . lefts
|
||||
getNumericCols = onlyNumCols . getPGColumnFields
|
||||
getComparableCols = onlyComparableCols . getPGColumnFields
|
||||
onlyFloat = const $ mkScalarTy PGFloat
|
||||
|
||||
mkTypeMaker "sum" = mkColumnType
|
||||
mkTypeMaker _ = onlyFloat
|
||||
|
||||
mkColAggFldsObjs flds =
|
||||
let numCols = getNumCols flds
|
||||
compCols = getCompCols flds
|
||||
let numCols = getNumericCols flds
|
||||
compCols = getComparableCols flds
|
||||
mkNumObjFld n = mkTableColAggFldsObj tn n (mkTypeMaker n) numCols
|
||||
mkCompObjFld n = mkTableColAggFldsObj tn n mkColumnType compCols
|
||||
numFldsObjs = bool (map mkNumObjFld numAggOps) [] $ null numCols
|
||||
@ -288,7 +296,7 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
-- never referenced anywhere else)
|
||||
referencedEnumTypes =
|
||||
let allColumnInfos =
|
||||
(selPermM ^.. _Just._2.traverse._Left)
|
||||
(selPermM ^.. _Just._2.traverse._SFPGColumn)
|
||||
<> (insPermM ^. _Just._1)
|
||||
<> (updColsM ^. _Just)
|
||||
allEnumReferences = allColumnInfos ^.. traverse.to pgiType._PGColumnEnumReference
|
||||
@ -297,6 +305,16 @@ mkGCtxRole' tn descM insPermM selPermM updColsM delPermM pkeyCols constraints vi
|
||||
in TIEnum $ mkHsraEnumTyInfo Nothing typeName (EnumValuesReference enumReference)
|
||||
|
||||
|
||||
-- computed fields function args input objects
|
||||
mkComputedFieldFuncArgsInp computedColInfo =
|
||||
let ComputedFieldFunction qf inputArgs tableArg _ =
|
||||
_cfFunction computedColInfo
|
||||
withoutTableArg = functionArgsWithoutTableArg tableArg inputArgs
|
||||
in mkFuncArgsInp qf withoutTableArg
|
||||
|
||||
computedColFuncArgsInps = map TIInpObj $ catMaybes $
|
||||
maybe [] (map mkComputedFieldFuncArgsInp . getComputedFields) selFldsM
|
||||
|
||||
getRootFldsRole'
|
||||
:: QualifiedTable
|
||||
-> [PGColumnInfo]
|
||||
@ -398,8 +416,7 @@ getRootFldsRole' tn primCols constraints fields funcs insM
|
||||
, g fi $ fiDescription fi
|
||||
)
|
||||
|
||||
mkFuncArgItemSeq fi = Seq.fromList $ procFuncArgs (fiInputArgs fi)
|
||||
$ \fa t -> FuncArgItem (G.Name t) (faName fa) (faHasDefault fa)
|
||||
mkFuncArgItemSeq = mkFuncArgSeq . fiInputArgs
|
||||
|
||||
|
||||
getSelPermission :: TableInfo PGColumnInfo -> RoleName -> Maybe SelPermInfo
|
||||
@ -423,7 +440,7 @@ getSelPerm tableCache fields role selPermInfo = do
|
||||
remTableColGNameMap =
|
||||
mkPGColGNameMap $ getValidCols remTableFlds
|
||||
return $ flip fmap remTableSelPermM $
|
||||
\rmSelPermM -> Right RelationshipFieldInfo
|
||||
\rmSelPermM -> SFRelationship RelationshipFieldInfo
|
||||
{ _rfiInfo = relInfo
|
||||
, _rfiAllowAgg = spiAllowAgg rmSelPermM
|
||||
, _rfiColumns = remTableColGNameMap
|
||||
@ -432,15 +449,37 @@ getSelPerm tableCache fields role selPermInfo = do
|
||||
, _rfiIsNullable = isRelNullable fields relInfo
|
||||
}
|
||||
|
||||
return (spiAllowAgg selPermInfo, cols <> relFlds)
|
||||
computedColFlds <- fmap catMaybes $ forM computedFields $ \info -> do
|
||||
let ComputedFieldInfo name function returnTy _ = info
|
||||
ComputedFieldFunction _ inputArgs tableArg _ = function
|
||||
inputArgSeq = mkFuncArgSeq $ functionArgsWithoutTableArg tableArg inputArgs
|
||||
fmap (SFComputedField . ComputedField name function inputArgSeq) <$>
|
||||
case returnTy of
|
||||
CFRScalar scalarTy -> pure $ Just $ CFTScalar scalarTy
|
||||
CFRSetofTable retTable -> do
|
||||
retTableInfo <- getTabInfo tableCache retTable
|
||||
let retTableSelPermM = getSelPermission retTableInfo role
|
||||
retTableFlds = _tiFieldInfoMap retTableInfo
|
||||
retTableColGNameMap =
|
||||
mkPGColGNameMap $ getValidCols retTableFlds
|
||||
pure $ flip fmap retTableSelPermM $
|
||||
\selPerm -> CFTTable ComputedFieldTable
|
||||
{ _cftTable = retTable
|
||||
, _cftCols = retTableColGNameMap
|
||||
, _cftPermFilter = spiFilter selPerm
|
||||
, _cftPermLimit = spiLimit selPerm
|
||||
}
|
||||
|
||||
return (spiAllowAgg selPermInfo, cols <> relFlds <> computedColFlds)
|
||||
where
|
||||
validRels = getValidRels fields
|
||||
validCols = getValidCols fields
|
||||
cols = catMaybes $ flip map validCols $
|
||||
\colInfo -> fmap Left $ bool Nothing (Just colInfo) $
|
||||
Set.member (pgiColumn colInfo) allowedCols
|
||||
cols = map SFPGColumn $ getColInfos (toList allowedCols) validCols
|
||||
computedFields = flip filter (getComputedFieldInfos fields) $
|
||||
\info -> _cfiName info `Set.member` allowedComputedFields
|
||||
|
||||
allowedCols = spiCols selPermInfo
|
||||
allowedComputedFields = spiComputedFields selPermInfo
|
||||
|
||||
mkInsCtx
|
||||
:: MonadError QErr m
|
||||
@ -512,7 +551,7 @@ mkAdminSelFlds fields tableCache = do
|
||||
let remoteTableFlds = _tiFieldInfoMap remoteTableInfo
|
||||
remoteTableColGNameMap =
|
||||
mkPGColGNameMap $ getValidCols remoteTableFlds
|
||||
return $ Right RelationshipFieldInfo
|
||||
return $ SFRelationship RelationshipFieldInfo
|
||||
{ _rfiInfo = relInfo
|
||||
, _rfiAllowAgg = True
|
||||
, _rfiColumns = remoteTableColGNameMap
|
||||
@ -521,11 +560,31 @@ mkAdminSelFlds fields tableCache = do
|
||||
, _rfiIsNullable = isRelNullable fields relInfo
|
||||
}
|
||||
|
||||
return $ colSelFlds <> relSelFlds
|
||||
computedColFlds <- forM computedCols $ \info -> do
|
||||
let ComputedFieldInfo name function returnTy _ = info
|
||||
ComputedFieldFunction _ inputArgs tableArg _ = function
|
||||
inputArgSeq = mkFuncArgSeq $ functionArgsWithoutTableArg tableArg inputArgs
|
||||
(SFComputedField . ComputedField name function inputArgSeq) <$>
|
||||
case returnTy of
|
||||
CFRScalar scalarTy -> pure $ CFTScalar scalarTy
|
||||
CFRSetofTable retTable -> do
|
||||
retTableInfo <- getTabInfo tableCache retTable
|
||||
let retTableFlds = _tiFieldInfoMap retTableInfo
|
||||
retTableColGNameMap =
|
||||
mkPGColGNameMap $ getValidCols retTableFlds
|
||||
pure $ CFTTable ComputedFieldTable
|
||||
{ _cftTable = retTable
|
||||
, _cftCols = retTableColGNameMap
|
||||
, _cftPermFilter = noFilter
|
||||
, _cftPermLimit = Nothing
|
||||
}
|
||||
|
||||
return $ colSelFlds <> relSelFlds <> computedColFlds
|
||||
where
|
||||
cols = getValidCols fields
|
||||
colSelFlds = map Left cols
|
||||
colSelFlds = map SFPGColumn cols
|
||||
validRels = getValidRels fields
|
||||
computedCols = getComputedFieldInfos fields
|
||||
|
||||
mkGCtxRole
|
||||
:: (MonadError QErr m)
|
||||
@ -737,7 +796,8 @@ mkGCtx tyAgg (RootFields queryFields mutationFields) insCtxMap =
|
||||
(Map.map fst queryFields) (Map.map fst mutationFields) insCtxMap
|
||||
where
|
||||
TyAgg tyInfos fldInfos scalars ordByEnums = tyAgg
|
||||
colTys = Set.fromList $ map pgiType $ lefts $ Map.elems fldInfos
|
||||
colTys = Set.fromList $ map pgiType $ mapMaybe (^? _RFPGColumn) $
|
||||
Map.elems fldInfos
|
||||
mkMutRoot =
|
||||
mkHsraObjTyInfo (Just "mutation root") (G.NamedType "mutation_root") Set.empty .
|
||||
mapFromL _fiName
|
||||
|
@ -270,7 +270,7 @@ mkBoolExpInp tn fields =
|
||||
boolExpTy = mkBoolExpTy tn
|
||||
|
||||
-- all the fields of this input object
|
||||
inpValues = combinators <> map mkFldExpInp fields
|
||||
inpValues = combinators <> mapMaybe mkFldExpInp fields
|
||||
|
||||
mk n ty = InpValInfo Nothing n Nothing $ G.toGT ty
|
||||
|
||||
@ -283,9 +283,10 @@ mkBoolExpInp tn fields =
|
||||
]
|
||||
|
||||
mkFldExpInp = \case
|
||||
Left (PGColumnInfo _ name colTy _ _) ->
|
||||
mk name (mkCompExpTy colTy)
|
||||
Right relationshipField ->
|
||||
SFPGColumn (PGColumnInfo _ name colTy _ _) ->
|
||||
Just $ mk name (mkCompExpTy colTy)
|
||||
SFRelationship relationshipField ->
|
||||
let relationshipName = riName $ _rfiInfo relationshipField
|
||||
remoteTable = riRTable $ _rfiInfo relationshipField
|
||||
in mk (mkRelName relationshipName) (mkBoolExpTy remoteTable)
|
||||
in Just $ mk (mkRelName relationshipName) (mkBoolExpTy remoteTable)
|
||||
SFComputedField _ -> Nothing -- TODO: support computed fields in bool exps
|
||||
|
@ -4,11 +4,16 @@ module Hasura.GraphQL.Schema.Common
|
||||
, fromInpValL
|
||||
|
||||
, RelationshipFieldInfo(..)
|
||||
, SelField
|
||||
, SelField(..)
|
||||
, _SFPGColumn
|
||||
, getPGColumnFields
|
||||
, getRelationshipFields
|
||||
, getComputedFields
|
||||
|
||||
, mkColumnType
|
||||
, mkRelName
|
||||
, mkAggRelName
|
||||
, mkComputedFieldName
|
||||
|
||||
, mkTableTy
|
||||
, mkTableEnumType
|
||||
@ -17,12 +22,17 @@ module Hasura.GraphQL.Schema.Common
|
||||
, mkColumnEnumVal
|
||||
, mkDescriptionWith
|
||||
, mkDescription
|
||||
|
||||
, mkFuncArgsTy
|
||||
) where
|
||||
|
||||
import qualified Data.HashMap.Strict as Map
|
||||
import qualified Data.Text as T
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
import Control.Lens
|
||||
import Control.Lens.TH (makePrisms)
|
||||
|
||||
import Hasura.GraphQL.Resolve.Types
|
||||
import Hasura.GraphQL.Validate.Types
|
||||
import Hasura.Prelude
|
||||
@ -39,7 +49,21 @@ data RelationshipFieldInfo
|
||||
, _rfiIsNullable :: !Bool
|
||||
} deriving (Show, Eq)
|
||||
|
||||
type SelField = Either PGColumnInfo RelationshipFieldInfo
|
||||
data SelField
|
||||
= SFPGColumn !PGColumnInfo
|
||||
| SFRelationship !RelationshipFieldInfo
|
||||
| SFComputedField !ComputedField
|
||||
deriving (Show, Eq)
|
||||
$(makePrisms ''SelField)
|
||||
|
||||
getPGColumnFields :: [SelField] -> [PGColumnInfo]
|
||||
getPGColumnFields = mapMaybe (^? _SFPGColumn)
|
||||
|
||||
getRelationshipFields :: [SelField] -> [RelationshipFieldInfo]
|
||||
getRelationshipFields = mapMaybe (^? _SFRelationship)
|
||||
|
||||
getComputedFields :: [SelField] -> [ComputedField]
|
||||
getComputedFields = mapMaybe (^? _SFComputedField)
|
||||
|
||||
qualObjectToName :: (ToTxt a) => QualifiedObject a -> G.Name
|
||||
qualObjectToName = G.Name . snakeCaseQualObject
|
||||
@ -56,6 +80,9 @@ mkRelName rn = G.Name $ relNameToTxt rn
|
||||
mkAggRelName :: RelName -> G.Name
|
||||
mkAggRelName rn = G.Name $ relNameToTxt rn <> "_aggregate"
|
||||
|
||||
mkComputedFieldName :: ComputedFieldName -> G.Name
|
||||
mkComputedFieldName = G.Name . computedFieldNameToText
|
||||
|
||||
mkColumnType :: PGColumnType -> G.NamedType
|
||||
mkColumnType = \case
|
||||
PGColumnScalar scalarType -> mkScalarTy scalarType
|
||||
@ -82,3 +109,11 @@ mkDescriptionWith descM defaultTxt = G.Description $ case descM of
|
||||
|
||||
mkDescription :: PGDescription -> G.Description
|
||||
mkDescription = G.Description . getPGDescription
|
||||
|
||||
mkFuncArgsName :: QualifiedFunction -> G.Name
|
||||
mkFuncArgsName fn =
|
||||
qualObjectToName fn <> "_args"
|
||||
|
||||
mkFuncArgsTy :: QualifiedFunction -> G.NamedType
|
||||
mkFuncArgsTy =
|
||||
G.NamedType . mkFuncArgsName
|
||||
|
@ -3,6 +3,7 @@ module Hasura.GraphQL.Schema.Function
|
||||
, mkFuncArgsInp
|
||||
, mkFuncQueryFld
|
||||
, mkFuncAggQueryFld
|
||||
, mkFuncArgsTy
|
||||
) where
|
||||
|
||||
import qualified Data.Sequence as Seq
|
||||
@ -16,15 +17,6 @@ import Hasura.Prelude
|
||||
import Hasura.RQL.Types
|
||||
import Hasura.SQL.Types
|
||||
|
||||
mkFuncArgsName :: QualifiedFunction -> G.Name
|
||||
mkFuncArgsName fn =
|
||||
qualObjectToName fn <> "_args"
|
||||
|
||||
mkFuncArgsTy :: QualifiedFunction -> G.NamedType
|
||||
mkFuncArgsTy =
|
||||
G.NamedType . mkFuncArgsName
|
||||
|
||||
|
||||
{-
|
||||
input function_args {
|
||||
arg1: arg-type1!
|
||||
@ -49,12 +41,10 @@ procFuncArgs argSeq f =
|
||||
let argT = "arg_" <> T.pack (show argNo)
|
||||
in (items <> pure (f fa argT), argNo + 1)
|
||||
|
||||
mkFuncArgsInp :: FunctionInfo -> Maybe InpObjTyInfo
|
||||
mkFuncArgsInp funcInfo =
|
||||
mkFuncArgsInp :: QualifiedFunction -> Seq.Seq FunctionArg -> Maybe InpObjTyInfo
|
||||
mkFuncArgsInp funcName funcArgs =
|
||||
bool (Just inpObj) Nothing $ null funcArgs
|
||||
where
|
||||
funcName = fiName funcInfo
|
||||
funcArgs = fiInputArgs funcInfo
|
||||
funcArgsTy = mkFuncArgsTy funcName
|
||||
|
||||
inpObj = mkHsraInpTyInfo Nothing funcArgsTy $
|
||||
@ -64,7 +54,7 @@ mkFuncArgsInp funcInfo =
|
||||
|
||||
mkInpVal fa t =
|
||||
InpValInfo Nothing (G.Name t) Nothing $
|
||||
G.toGT $ mkScalarTy $ faType fa
|
||||
G.toGT $ mkScalarTy $ _qptName $ faType fa
|
||||
|
||||
{-
|
||||
|
||||
|
@ -141,8 +141,8 @@ mkOrdByInpObj tn selFlds = (inpObjTy, ordByCtx)
|
||||
desc = G.Description $
|
||||
"ordering options when selecting data from " <>> tn
|
||||
|
||||
pgColFlds = lefts selFlds
|
||||
relFltr ty = flip filter (rights selFlds) $
|
||||
pgColFlds = getPGColumnFields selFlds
|
||||
relFltr ty = flip filter (getRelationshipFields selFlds) $
|
||||
\rf -> riType (_rfiInfo rf) == ty
|
||||
objRels = relFltr ObjRel
|
||||
arrRels = relFltr ArrRel
|
||||
|
@ -5,6 +5,8 @@ module Hasura.GraphQL.Schema.Select
|
||||
, mkTableAggFldsObj
|
||||
, mkTableColAggFldsObj
|
||||
|
||||
, functionArgsWithoutTableArg
|
||||
|
||||
, mkSelFld
|
||||
, mkAggSelFld
|
||||
, mkSelFldPKey
|
||||
@ -14,8 +16,10 @@ module Hasura.GraphQL.Schema.Select
|
||||
|
||||
import qualified Data.HashMap.Strict as Map
|
||||
import qualified Data.HashSet as Set
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
import Hasura.GraphQL.Resolve.Types
|
||||
import Hasura.GraphQL.Schema.BoolExp
|
||||
import Hasura.GraphQL.Schema.Common
|
||||
import Hasura.GraphQL.Schema.OrderBy
|
||||
@ -68,6 +72,43 @@ mkPGColFld colInfo =
|
||||
notNullTy = G.toGT $ G.toNT columnType
|
||||
nullTy = G.toGT columnType
|
||||
|
||||
functionArgsWithoutTableArg
|
||||
:: FunctionTableArgument -> Seq.Seq FunctionArg -> Seq.Seq FunctionArg
|
||||
functionArgsWithoutTableArg tableArg inputArgs = Seq.fromList $
|
||||
case tableArg of
|
||||
FTAFirst -> tail $ toList inputArgs
|
||||
FTANamed argName _ ->
|
||||
filter ((/=) (Just argName) . faName) $ toList inputArgs
|
||||
|
||||
mkComputedFieldFld :: ComputedField -> ObjFldInfo
|
||||
mkComputedFieldFld field =
|
||||
uncurry (mkHsraObjFldInfo (Just desc) fieldName) $ case fieldType of
|
||||
CFTScalar scalarTy ->
|
||||
let inputParams = mkPGColParams (PGColumnScalar scalarTy)
|
||||
<> fromInpValL (maybeToList maybeFunctionInputArg)
|
||||
in (inputParams, G.toGT $ mkScalarTy scalarTy)
|
||||
CFTTable computedFieldtable ->
|
||||
let table = _cftTable computedFieldtable
|
||||
in ( fromInpValL $ maybeToList maybeFunctionInputArg <> mkSelArgs table
|
||||
, G.toGT $ G.toLT $ G.toNT $ mkTableTy table
|
||||
)
|
||||
where
|
||||
columnDescription = "A computed field, executes function " <>> qf
|
||||
desc = mkDescriptionWith (_cffDescription function) columnDescription
|
||||
fieldName = mkComputedFieldName name
|
||||
ComputedField name function _ fieldType = field
|
||||
qf = _cffName function
|
||||
|
||||
maybeFunctionInputArg =
|
||||
let funcArgDesc = G.Description $ "input parameters for function " <>> qf
|
||||
inputValue = InpValInfo (Just funcArgDesc) "args" Nothing $
|
||||
G.toGT $ G.toNT $ mkFuncArgsTy qf
|
||||
inputArgs = _cffInputArgs function
|
||||
tableArgument = _cffTableArgument function
|
||||
withoutTableArgs = functionArgsWithoutTableArg tableArgument inputArgs
|
||||
in bool (Just inputValue) Nothing $ null withoutTableArgs
|
||||
|
||||
|
||||
-- where: table_bool_exp
|
||||
-- limit: Int
|
||||
-- offset: Int
|
||||
@ -143,7 +184,10 @@ mkTableObj
|
||||
mkTableObj tn descM allowedFlds =
|
||||
mkObjTyInfo (Just desc) (mkTableTy tn) Set.empty (mapFromL _fiName flds) TLHasuraType
|
||||
where
|
||||
flds = concatMap (either (pure . mkPGColFld) mkRelationshipField') allowedFlds
|
||||
flds = pgColFlds <> relFlds <> computedFlds
|
||||
pgColFlds = map mkPGColFld $ getPGColumnFields allowedFlds
|
||||
relFlds = concatMap mkRelationshipField' $ getRelationshipFields allowedFlds
|
||||
computedFlds = map mkComputedFieldFld $ getComputedFields allowedFlds
|
||||
mkRelationshipField' (RelationshipFieldInfo relInfo allowAgg _ _ _ isNullable) =
|
||||
mkRelationshipField allowAgg relInfo isNullable
|
||||
desc = mkDescriptionWith descM $ "columns and relationships of " <>> tn
|
||||
|
@@ -383,7 +383,7 @@ fromScalarTyDef (G.ScalarTypeDefinition descM n _) loc =
    "Float"   -> return PGFloat
    "String"  -> return PGText
    "Boolean" -> return PGBoolean
    _         -> return $ txtToPgColTy $ G.unName n
    _         -> return $ textToPGScalarType $ G.unName n

data TypeInfo
  = TIScalar !ScalarTyInfo
@@ -7,6 +7,7 @@ module Hasura.Prelude
  , bsToTxt
  , txtToBs
  , spanMaybeM
  , findWithIndex
  ) where

import Control.Applicative as M (Alternative (..))
@@ -27,8 +28,8 @@ import Data.Foldable as M (asum, foldrM, for_, toList,
import Data.Function as M (on, (&))
import Data.Functor as M (($>), (<&>))
import Data.Hashable as M (Hashable)
import Data.List as M (find, foldl', group,
                       intercalate, intersect,
import Data.List as M (find, findIndex, foldl',
                       group, intercalate, intersect,
                       lookup, sort, sortBy, sortOn,
                       union, unionBy, (\\))
import Data.Maybe as M (catMaybes, fromMaybe, isJust,
@@ -77,3 +78,9 @@ spanMaybeM f = go . toList
  go l@(x:xs) = f x >>= \case
    Just y  -> first (y:) <$> go xs
    Nothing -> pure ([], l)

findWithIndex :: (a -> Bool) -> [a] -> Maybe (a, Int)
findWithIndex p l = do
  v <- find p l
  i <- findIndex p l
  pure (v, i)
281  server/src-lib/Hasura/RQL/DDL/ComputedField.hs  Normal file
@ -0,0 +1,281 @@
|
||||
{- |
|
||||
Description: Add/Drop computed fields in metadata
|
||||
-}
|
||||
module Hasura.RQL.DDL.ComputedField
|
||||
( AddComputedField(..)
|
||||
, ComputedFieldDefinition(..)
|
||||
, runAddComputedField
|
||||
, addComputedFieldP2Setup
|
||||
, DropComputedField
|
||||
, dropComputedFieldFromCatalog
|
||||
, runDropComputedField
|
||||
) where
|
||||
|
||||
import Hasura.Prelude
|
||||
|
||||
import Hasura.EncJSON
|
||||
import Hasura.RQL.DDL.Deps
|
||||
import Hasura.RQL.DDL.Permission.Internal
|
||||
import Hasura.RQL.DDL.Schema.Function (RawFunctionInfo (..),
|
||||
fetchRawFunctioInfo,
|
||||
mkFunctionArgs)
|
||||
import Hasura.RQL.Types
|
||||
import Hasura.SQL.Types
|
||||
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.TH
|
||||
import Language.Haskell.TH.Syntax (Lift)
|
||||
|
||||
import qualified Control.Monad.Validate as MV
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.Text as T
|
||||
import qualified Database.PG.Query as Q
|
||||
import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
data ComputedFieldDefinition
|
||||
= ComputedFieldDefinition
|
||||
{ _cfdFunction :: !QualifiedFunction
|
||||
, _cfdTableArgument :: !(Maybe FunctionArgName)
|
||||
} deriving (Show, Eq, Lift)
|
||||
$(deriveJSON (aesonDrop 4 snakeCase) ''ComputedFieldDefinition)

data AddComputedField
  = AddComputedField
  { _afcTable      :: !QualifiedTable
  , _afcName       :: !ComputedFieldName
  , _afcDefinition :: !ComputedFieldDefinition
  , _afcComment    :: !(Maybe Text)
  } deriving (Show, Eq, Lift)
$(deriveJSON (aesonDrop 4 snakeCase) ''AddComputedField)

runAddComputedField
  :: (QErrM m, CacheRWM m, MonadTx m, UserInfoM m)
  => AddComputedField -> m EncJSON
runAddComputedField q = do
  addComputedFieldP1 q
  addComputedFieldP2 q

addComputedFieldP1
  :: (UserInfoM m, QErrM m, CacheRM m)
  => AddComputedField -> m ()
addComputedFieldP1 q = do
  adminOnly
  tableInfo <- withPathK "table" $ askTabInfo tableName
  withPathK "name" $ checkForFieldConflict tableInfo $
    fromComputedField computedFieldName
  where
    AddComputedField tableName computedFieldName _ _ = q

addComputedFieldP2
  :: (QErrM m, CacheRWM m, MonadTx m)
  => AddComputedField -> m EncJSON
addComputedFieldP2 q = withPathK "definition" $ do
  rawFunctionInfo <- withPathK "function" $
    fetchRawFunctioInfo $ _cfdFunction definition
  addComputedFieldP2Setup table computedField definition rawFunctionInfo comment
  addComputedFieldToCatalog q
  return successMsg
  where
    AddComputedField table computedField definition comment = q

data ComputedFieldValidateError
  = CFVENotValidGraphQLName !ComputedFieldName
  | CFVEInvalidTableArgument !InvalidTableArgument
  | CFVENotBaseReturnType !PGScalarType
  | CFVEReturnTableNotFound !QualifiedTable
  | CFVENoInputArguments
  | CFVEFunctionVolatile
  deriving (Show, Eq)

data InvalidTableArgument
  = ITANotFound !FunctionArgName
  | ITANotComposite !FunctionTableArgument
  | ITANotTable !QualifiedTable !FunctionTableArgument
  deriving (Show, Eq)

showError :: QualifiedFunction -> ComputedFieldValidateError -> Text
showError qf = \case
  CFVENotValidGraphQLName computedField ->
    computedField <<> " is not valid GraphQL name"
  CFVEInvalidTableArgument (ITANotFound argName) ->
    argName <<> " is not an input argument of " <> qf <<> " function"
  CFVEInvalidTableArgument (ITANotComposite functionArg) ->
    showFunctionTableArgument functionArg <> " is not COMPOSITE type"
  CFVEInvalidTableArgument (ITANotTable ty functionArg) ->
    showFunctionTableArgument functionArg <> " of type " <> ty
    <<> " is not the table to which the computed field is being added"
  CFVENotBaseReturnType scalarType ->
    "the function " <> qf <<> " returning type " <> toSQLTxt scalarType
    <> " is not a BASE type"
  CFVEReturnTableNotFound table ->
    "the function " <> qf <<> " returning set of table " <> table
    <<> " is not tracked or not found in database"
  CFVENoInputArguments ->
    "the function " <> qf <<> " has no input arguments"
  CFVEFunctionVolatile ->
    "the function " <> qf <<> " is of type VOLATILE; cannot be added as a computed field"
  where
    showFunctionTableArgument = \case
      FTAFirst            -> "first argument of the function " <>> qf
      FTANamed argName _  -> argName <<> " argument of the function " <>> qf

addComputedFieldP2Setup
  :: (QErrM m, CacheRWM m)
  => QualifiedTable
  -> ComputedFieldName
  -> ComputedFieldDefinition
  -> RawFunctionInfo
  -> Maybe Text
  -> m ()
addComputedFieldP2Setup table computedField definition rawFunctionInfo comment = do
  sc <- askSchemaCache
  computedFieldInfo <- either (throw400 NotSupported . showErrors) pure
                       =<< MV.runValidateT (mkComputedFieldInfo sc)
  addComputedFieldToCache table computedFieldInfo
  where
    inputArgNames = rfiInputArgNames rawFunctionInfo
    ComputedFieldDefinition function maybeTableArg = definition
    functionReturnType = QualifiedPGType (rfiReturnTypeSchema rawFunctionInfo)
                         (rfiReturnTypeName rawFunctionInfo)
                         (rfiReturnTypeType rawFunctionInfo)

    computedFieldGraphQLName = G.Name $ computedFieldNameToText computedField

    mkComputedFieldInfo :: (MV.MonadValidate [ComputedFieldValidateError] m)
                        => SchemaCache -> m ComputedFieldInfo
    mkComputedFieldInfo sc = do
      -- Check if computed field name is a valid GraphQL name
      unless (G.isValidName computedFieldGraphQLName) $
        MV.dispute $ pure $ CFVENotValidGraphQLName computedField

      -- Check if function is VOLATILE
      when (rfiFunctionType rawFunctionInfo == FTVOLATILE) $
        MV.dispute $ pure CFVEFunctionVolatile

      -- Validate and resolve return type
      returnType <-
        if rfiReturnsTable rawFunctionInfo then do
          let returnTable = typeToTable functionReturnType
          unless (isTableTracked sc returnTable) $ MV.dispute $ pure $
            CFVEReturnTableNotFound returnTable
          pure $ CFRSetofTable returnTable
        else do
          let scalarType = _qptName functionReturnType
          unless (isBaseType functionReturnType) $ MV.dispute $ pure $
            CFVENotBaseReturnType scalarType
          pure $ CFRScalar scalarType

      -- Validate and resolve table argument
      let inputArgs = mkFunctionArgs (rfiDefaultArgs rawFunctionInfo)
                      (rfiInputArgTypes rawFunctionInfo) inputArgNames
      tableArgument <- case maybeTableArg of
        Just argName ->
          case findWithIndex (maybe False (argName ==) . faName) inputArgs of
            Just (tableArg, index) -> do
              let functionTableArg = FTANamed argName index
              validateTableArgumentType functionTableArg $ faType tableArg
              pure functionTableArg
            Nothing ->
              MV.refute $ pure $ CFVEInvalidTableArgument $ ITANotFound argName
        Nothing -> do
          case inputArgs of
            []           -> MV.dispute $ pure CFVENoInputArguments
            (firstArg:_) ->
              validateTableArgumentType FTAFirst $ faType firstArg
          pure FTAFirst

      let computedFieldFunction =
            ComputedFieldFunction function (Seq.fromList inputArgs) tableArgument $
            rfiDescription rawFunctionInfo

      pure $ ComputedFieldInfo computedField computedFieldFunction returnType comment

    validateTableArgumentType :: (MV.MonadValidate [ComputedFieldValidateError] m)
                              => FunctionTableArgument
                              -> QualifiedPGType
                              -> m ()
    validateTableArgumentType tableArg qpt = do
      when (_qptType qpt /= PGKindComposite) $
        MV.dispute $ pure $ CFVEInvalidTableArgument $ ITANotComposite tableArg
      let typeTable = typeToTable qpt
      unless (table == typeTable) $
        MV.dispute $ pure $ CFVEInvalidTableArgument $ ITANotTable typeTable tableArg

    showErrors :: [ComputedFieldValidateError] -> Text
    showErrors allErrors =
      "the computed field " <> computedField <<> " cannot be added to table "
      <> table <<> reasonMessage
      where
        reasonMessage = case allErrors of
          [singleError] -> " because " <> showError function singleError
          _ -> " for the following reasons: \n" <> T.unlines
               (map ((" • " <>) . showError function) allErrors)

addComputedFieldToCatalog
  :: MonadTx m
  => AddComputedField -> m ()
addComputedFieldToCatalog q =
  liftTx $ Q.withQE defaultTxErrorHandler
    [Q.sql|
     INSERT INTO hdb_catalog.hdb_computed_field
       (table_schema, table_name, computed_field_name, definition, comment)
     VALUES ($1, $2, $3, $4, $5)
    |] (schemaName, tableName, computedField, Q.AltJ definition, comment) True
  where
    QualifiedObject schemaName tableName = table
    AddComputedField table computedField definition comment = q

data DropComputedField
  = DropComputedField
  { _dccTable   :: !QualifiedTable
  , _dccName    :: !ComputedFieldName
  , _dccCascade :: !Bool
  } deriving (Show, Eq, Lift)
$(deriveToJSON (aesonDrop 4 snakeCase) ''DropComputedField)

instance FromJSON DropComputedField where
  parseJSON = withObject "Object" $ \o ->
    DropComputedField
      <$> o .: "table"
      <*> o .: "name"
      <*> o .:? "cascade" .!= False

runDropComputedField
  :: (UserInfoM m, CacheRWM m, MonadTx m)
  => DropComputedField -> m EncJSON
runDropComputedField (DropComputedField table computedField cascade) = do
  -- Validation
  adminOnly
  fields <- withPathK "table" $ _tiFieldInfoMap <$> askTabInfo table
  void $ withPathK "name" $ askComputedFieldInfo fields computedField

  -- Dependencies check
  sc <- askSchemaCache
  let deps = getDependentObjs sc $ SOTableObj table $ TOComputedField computedField
  when (not cascade && not (null deps)) $ reportDeps deps
  mapM_ purgeComputedFieldDependency deps

  deleteComputedFieldFromCache table computedField
  dropComputedFieldFromCatalog table computedField
  pure successMsg
  where
    purgeComputedFieldDependency = \case
      SOTableObj qt (TOPerm role permType) | qt == table -> do
        liftTx $ dropPermFromCatalog qt role permType
        withPermType permType delPermFromCache role qt
      d -> throw500 $ "unexpected dependency for computed field "
           <> computedField <<> "; " <> reportSchemaObj d

dropComputedFieldFromCatalog
  :: MonadTx m
  => QualifiedTable -> ComputedFieldName -> m ()
dropComputedFieldFromCatalog (QualifiedObject schema table) computedField =
  liftTx $ Q.withQE defaultTxErrorHandler
    [Q.sql|
     DELETE FROM hdb_catalog.hdb_computed_field
      WHERE table_schema = $1
        AND table_name = $2
        AND computed_field_name = $3
    |] (schema, table, computedField) True
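For illustration (not part of this diff): a minimal, self-contained sketch of how the drop_computed_field argument object decodes, mirroring the FromJSON DropComputedField instance above. The Value/Text fields are plain stand-ins for QualifiedTable and ComputedFieldName; the field names and the cascade default of False come from the instance shown, everything else is hypothetical.

    {-# LANGUAGE OverloadedStrings #-}
    module Main where

    import           Data.Aeson
    import qualified Data.ByteString.Lazy.Char8 as BL
    import           Data.Text                  (Text)

    -- Simplified stand-ins for the engine's QualifiedTable / ComputedFieldName.
    data DropComputedFieldSketch = DropComputedFieldSketch
      { dcfTable   :: !Value -- the "table" argument, kept opaque here
      , dcfName    :: !Text
      , dcfCascade :: !Bool
      } deriving (Show)

    instance FromJSON DropComputedFieldSketch where
      parseJSON = withObject "Object" $ \o ->
        DropComputedFieldSketch
          <$> o .:  "table"
          <*> o .:  "name"
          <*> o .:? "cascade" .!= False -- optional, defaults to False

    main :: IO ()
    main = print (eitherDecode payload :: Either String DropComputedFieldSketch)
      where
        -- omitting "cascade" exercises the .!= False default
        payload = BL.pack
          "{\"table\": {\"schema\": \"public\", \"name\": \"author\"}, \"name\": \"get_articles\"}"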
@ -41,6 +41,7 @@ import Hasura.RQL.Types
import Hasura.SQL.Types

import qualified Database.PG.Query                  as Q
import qualified Hasura.RQL.DDL.ComputedField       as DCC
import qualified Hasura.RQL.DDL.EventTrigger        as DE
import qualified Hasura.RQL.DDL.Permission          as DP
import qualified Hasura.RQL.DDL.Permission.Internal as DP
@ -513,9 +514,10 @@ runDropInconsistentMetadata _ = do

purgeMetadataObj :: MonadTx m => MetadataObjId -> m ()
purgeMetadataObj = liftTx . \case
  (MOTable qt) -> DS.deleteTableFromCatalog qt
  (MOFunction qf) -> DS.delFunctionFromCatalog qf
  (MORemoteSchema rsn) -> DRS.removeRemoteSchemaFromCatalog rsn
  (MOTableObj qt (MTORel rn _)) -> DR.delRelFromCatalog qt rn
  (MOTableObj qt (MTOPerm rn pt)) -> DP.dropPermFromCatalog qt rn pt
  (MOTableObj _ (MTOTrigger trn)) -> DE.delEventTriggerFromCatalog trn
  (MOTable qt) -> DS.deleteTableFromCatalog qt
  (MOFunction qf) -> DS.delFunctionFromCatalog qf
  (MORemoteSchema rsn) -> DRS.removeRemoteSchemaFromCatalog rsn
  (MOTableObj qt (MTORel rn _)) -> DR.delRelFromCatalog qt rn
  (MOTableObj qt (MTOPerm rn pt)) -> DP.dropPermFromCatalog qt rn pt
  (MOTableObj _ (MTOTrigger trn)) -> DE.delEventTriggerFromCatalog trn
  (MOTableObj qt (MTOComputedField ccn)) -> DCC.dropComputedFieldFromCatalog qt ccn
@ -53,7 +53,7 @@ import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Permission.Triggers
import Hasura.RQL.DML.Internal
import Hasura.RQL.DML.Internal hiding (askPermInfo)
import Hasura.RQL.GBoolExp
import Hasura.RQL.Types
import Hasura.SQL.Types
@ -201,20 +201,30 @@ instance IsPerm InsPerm where
-- Select constraint
data SelPerm
  = SelPerm
  { spColumns           :: !PermColSpec -- Allowed columns
  , spFilter            :: !BoolExp     -- Filter expression
  , spLimit             :: !(Maybe Int) -- Limit value
  , spAllowAggregations :: !(Maybe Bool) -- Allow aggregation
  { spColumns           :: !PermColSpec         -- ^ Allowed columns
  , spFilter            :: !BoolExp             -- ^ Filter expression
  , spLimit             :: !(Maybe Int)         -- ^ Limit value
  , spAllowAggregations :: !Bool                -- ^ Allow aggregation
  , spComputedFields    :: ![ComputedFieldName] -- ^ Allowed computed fields
  } deriving (Show, Eq, Lift)
$(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm)

$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm)
instance FromJSON SelPerm where
  parseJSON = withObject "SelPerm" $ \o ->
    SelPerm
      <$> o .: "columns"
      <*> o .: "filter"
      <*> o .:? "limit"
      <*> o .:? "allow_aggregations" .!= False
      <*> o .:? "computed_fields" .!= []

buildSelPermInfo
  :: (QErrM m, CacheRM m)
  => TableInfo PGColumnInfo
  => RoleName
  -> TableInfo PGColumnInfo
  -> SelPerm
  -> m (WithDeps SelPermInfo)
buildSelPermInfo tabInfo sp = do
buildSelPermInfo role tabInfo sp = do
  let pgCols = convColSpec fieldInfoMap $ spColumns sp

  (be, beDeps) <- withPathK "filter" $
@ -224,18 +234,35 @@ buildSelPermInfo tabInfo sp = do
  void $ withPathK "columns" $ indexedForM pgCols $ \pgCol ->
    askPGType fieldInfoMap pgCol autoInferredErr

  -- validate computed fields
  withPathK "computed_fields" $ indexedForM_ computedFields $ \name -> do
    computedFieldInfo <- askComputedFieldInfo fieldInfoMap name
    case _cfiReturnType computedFieldInfo of
      CFRScalar _ -> pure ()
      CFRSetofTable returnTable -> do
        returnTableInfo <- askTabInfo returnTable
        let function = _cffName $ _cfiFunction $ computedFieldInfo
            errModifier e = "computed field " <> name <<> " executes function "
                            <> function <<> " which returns set of table "
                            <> returnTable <<> "; " <> e
        void $ modifyErr errModifier $ askPermInfo returnTableInfo role PASelect

  let deps = mkParentDep tn : beDeps ++ map (mkColDep DRUntyped tn) pgCols
             ++ map (mkComputedFieldDep DRUntyped tn) computedFields
      depHeaders = getDependentHeaders $ spFilter sp
      mLimit = spLimit sp

  withPathK "limit" $ mapM_ onlyPositiveInt mLimit

  return (SelPermInfo (HS.fromList pgCols) tn be mLimit allowAgg depHeaders, deps)

  return ( SelPermInfo (HS.fromList pgCols) (HS.fromList computedFields)
             tn be mLimit allowAgg depHeaders
         , deps
         )
  where
    tn = _tiName tabInfo
    fieldInfoMap = _tiFieldInfoMap tabInfo
    allowAgg = or $ spAllowAggregations sp
    allowAgg = spAllowAggregations sp
    computedFields = spComputedFields sp
    autoInferredErr = "permissions for relationships are automatically inferred"

type SelPermDef = PermDef SelPerm
@ -256,8 +283,8 @@ instance IsPerm SelPerm where

  permAccessor = PASelect

  buildPermInfo ti (PermDef _ a _) =
    buildSelPermInfo ti a
  buildPermInfo ti (PermDef rn a _) =
    buildSelPermInfo rn ti a

  buildDropPermP1Res =
    void . dropPermP1
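For illustration (not part of this diff): a self-contained model of the select-permission check buildSelPermInfo performs on computed fields above — scalar-returning computed fields are always allowed, while set-returning ones require the same role to already have select permission on the returned table. The Map/Text types and names below are simplified stand-ins, not the engine's schema-cache types.

    {-# LANGUAGE OverloadedStrings #-}
    module Main where

    import qualified Data.Map.Strict as Map
    import           Data.Text       (Text)

    -- What a computed field's function returns (simplified).
    data ReturnKind
      = Scalar
      | SetOfTable Text
      deriving (Show)

    -- role -> tables that role may select from (stand-in for the schema cache)
    type SelectPerms = Map.Map Text [Text]

    -- Scalar computed fields are always allowed; set-returning ones need a
    -- select permission on the returned table for the same role.
    validateComputedField :: SelectPerms -> Text -> ReturnKind -> Either Text ()
    validateComputedField _ _ Scalar = Right ()
    validateComputedField perms role (SetOfTable tbl)
      | tbl `elem` Map.findWithDefault [] role perms = Right ()
      | otherwise =
          Left $ "role " <> role <> " has no select permission on table " <> tbl

    main :: IO ()
    main = do
      let perms = Map.fromList [("user", ["article"])]
      print $ validateComputedField perms "user" (SetOfTable "article") -- Right ()
      print $ validateComputedField perms "user" (SetOfTable "secret")  -- Left ...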
@ -33,6 +33,7 @@ import qualified Hasura.GraphQL.Schema as GS

import Hasura.Db
import Hasura.GraphQL.RemoteServer
import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.EventTrigger
import Hasura.RQL.DDL.Permission
@ -69,6 +70,7 @@ buildSchemaCacheWithOptions withSetup = do
  -- fetch all catalog metadata
  CatalogMetadata tables relationships permissions
    eventTriggers remoteSchemas functions fkeys' allowlistDefs
    computedFields
    <- liftTx fetchCatalogData

  let fkeys = HS.fromList fkeys'
@ -121,19 +123,31 @@ buildSchemaCacheWithOptions withSetup = do
      mkAllTriggersQ trn qt allCols (stringifyNum sqlGenCtx) (etcDefinition etc)

  -- sql functions
  forM_ functions $ \(CatalogFunction qf rawfiM) -> do
  forM_ functions $ \(CatalogFunction qf funcDefs) -> do
    let def = toJSON $ TrackFunction qf
        mkInconsObj =
          InconsistentMetadataObj (MOFunction qf) MOTFunction def
    modifyErr (\e -> "function " <> qf <<> "; " <> e) $
      withSchemaObject_ mkInconsObj $ do
        rawfi <- onNothing rawfiM $
          throw400 NotExists $ "no such function exists in postgres : " <>> qf
        rawfi <- handleMultipleFunctions qf funcDefs
        trackFunctionP2Setup qf rawfi

  -- allow list
  replaceAllowlist $ concatMap _cdQueries allowlistDefs

  -- computedFields
  forM_ computedFields $ \(CatalogComputedField column funcDefs) -> do
    let AddComputedField qt name def comment = column
        qf = _cfdFunction def
        mkInconsObj =
          InconsistentMetadataObj (MOTableObj qt $ MTOComputedField name)
          MOTComputedField $ toJSON column
    modifyErr (\e -> "computed field " <> name <<> "; " <> e) $
      withSchemaObject_ mkInconsObj $ do
        rawfi <- handleMultipleFunctions qf funcDefs
        addComputedFieldP2Setup qt name def rawfi comment


  -- build GraphQL context with tables and functions
  GS.buildGCtxMapPG

@ -229,7 +243,7 @@ withMetadataCheck cascade action = do
      oldMeta = flip filter oldMetaU $ \tm -> tmTable tm `elem` existingTables
      schemaDiff = getSchemaDiff oldMeta newMeta
      existingFuncs = M.keys $ scFunctions sc
      oldFuncMeta = flip filter oldFuncMetaU $ \fm -> funcFromMeta fm `elem` existingFuncs
      oldFuncMeta = flip filter oldFuncMetaU $ \fm -> fmFunction fm `elem` existingFuncs
      FunctionDiff droppedFuncs alteredFuncs = getFuncDiff oldFuncMeta newFuncMeta
      overloadedFuncs = getOverloadedFuncs existingFuncs newFuncMeta

@ -337,6 +351,10 @@ purgeDependentObject schemaObjId = case schemaObjId of
    liftTx $ delEventTriggerFromCatalog trn
    delEventTriggerFromCache qt trn

  (SOTableObj qt (TOComputedField ccn)) -> do
    deleteComputedFieldFromCache qt ccn
    dropComputedFieldFromCatalog qt ccn

  _ -> throw500 $
       "unexpected dependent object : " <> reportSchemaObj schemaObjId
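For illustration (not part of this diff): the cache-building code above resolves each tracked function and each computed-field function through handleMultipleFunctions (defined further down in this diff), which accepts exactly one matching definition. A standalone sketch of that shape, with String stand-ins for the engine's types:

    {-# LANGUAGE LambdaCase #-}
    module Main where

    -- Accept exactly one function definition: zero means the function is
    -- missing from postgres, more than one means it is overloaded.
    pickSingleFunction :: String -> [a] -> Either String a
    pickSingleFunction qf = \case
      []   -> Left $ "no such function exists in postgres : " <> qf
      [fi] -> Right fi
      _    -> Left $ "function " <> qf <> " is overloaded. Overloaded functions are not supported"

    main :: IO ()
    main = do
      print (pickSingleFunction "public.fetch_articles" ["definition"])
      print (pickSingleFunction "public.fetch_articles" ([] :: [String]))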
@ -3,19 +3,20 @@ module Hasura.RQL.DDL.Schema.Diff
|
||||
, PGColMeta(..)
|
||||
, ConstraintMeta(..)
|
||||
, fetchTableMeta
|
||||
, ComputedFieldMeta(..)
|
||||
|
||||
, getDifference
|
||||
|
||||
, TableDiff(..)
|
||||
, getTableDiff
|
||||
, getTableChangeDeps
|
||||
, ComputedFieldDiff(..)
|
||||
|
||||
, SchemaDiff(..)
|
||||
, getSchemaDiff
|
||||
, getSchemaChangeDeps
|
||||
|
||||
, FunctionMeta(..)
|
||||
, funcFromMeta
|
||||
, fetchFunctionMeta
|
||||
, FunctionDiff(..)
|
||||
, getFuncDiff
|
||||
@ -29,11 +30,13 @@ import Hasura.SQL.Types
|
||||
|
||||
import qualified Database.PG.Query as Q
|
||||
|
||||
import Control.Arrow ((&&&))
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.TH
|
||||
|
||||
import qualified Data.HashMap.Strict as M
|
||||
import qualified Data.HashSet as HS
|
||||
import qualified Data.List.NonEmpty as NE
|
||||
|
||||
data PGColMeta
|
||||
= PGColMeta
|
||||
@ -44,7 +47,6 @@ data PGColMeta
|
||||
, pcmReferences :: ![QualifiedTable]
|
||||
, pcmDescription :: !(Maybe PGDescription)
|
||||
} deriving (Show, Eq)
|
||||
|
||||
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''PGColMeta)
|
||||
|
||||
data ConstraintMeta
|
||||
@ -53,25 +55,41 @@ data ConstraintMeta
|
||||
, cmOid :: !Int
|
||||
, cmType :: !ConstraintType
|
||||
} deriving (Show, Eq)
|
||||
|
||||
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ConstraintMeta)
|
||||
|
||||
data FunctionMeta
|
||||
= FunctionMeta
|
||||
{ fmOid :: !Int
|
||||
, fmFunction :: !QualifiedFunction
|
||||
, fmType :: !FunctionType
|
||||
, fmDescription :: !(Maybe PGDescription)
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 2 snakeCase) ''FunctionMeta)
|
||||
|
||||
data ComputedFieldMeta
|
||||
= ComputedFieldMeta
|
||||
{ ccmName :: !ComputedFieldName
|
||||
, ccmFunctionMeta :: !FunctionMeta
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''ComputedFieldMeta)
|
||||
|
||||
data TableMeta
|
||||
= TableMeta
|
||||
{ tmOid :: !Int
|
||||
, tmTable :: !QualifiedTable
|
||||
, tmDescription :: !(Maybe PGDescription)
|
||||
, tmColumns :: ![PGColMeta]
|
||||
, tmConstraints :: ![ConstraintMeta]
|
||||
, tmForeignKeys :: ![ForeignKey]
|
||||
{ tmOid :: !Int
|
||||
, tmTable :: !QualifiedTable
|
||||
, tmDescription :: !(Maybe PGDescription)
|
||||
, tmColumns :: ![PGColMeta]
|
||||
, tmConstraints :: ![ConstraintMeta]
|
||||
, tmForeignKeys :: ![ForeignKey]
|
||||
, tmComputedFields :: ![ComputedFieldMeta]
|
||||
} deriving (Show, Eq)
|
||||
|
||||
fetchTableMeta :: Q.Tx [TableMeta]
|
||||
fetchTableMeta = do
|
||||
res <- Q.listQ $(Q.sqlFromFile "src-rsr/table_meta.sql") () False
|
||||
forM res $ \(ts, tn, toid, descM, cols, constrnts, fkeys) ->
|
||||
forM res $ \(ts, tn, toid, descM, cols, constrnts, fkeys, computedCols) ->
|
||||
return $ TableMeta toid (QualifiedObject ts tn) descM (Q.getAltJ cols)
|
||||
(Q.getAltJ constrnts) (Q.getAltJ fkeys)
|
||||
(Q.getAltJ constrnts) (Q.getAltJ fkeys) (Q.getAltJ computedCols)
|
||||
|
||||
getOverlap :: (Eq k, Hashable k) => (v -> k) -> [v] -> [v] -> [(v, v)]
|
||||
getOverlap getKey left right =
|
||||
@ -85,6 +103,13 @@ getDifference getKey left right =
|
||||
where
|
||||
mkMap = M.fromList . map (\v -> (getKey v, v))
|
||||
|
||||
data ComputedFieldDiff
|
||||
= ComputedFieldDiff
|
||||
{ _cfdDropped :: [ComputedFieldName]
|
||||
, _cfdAltered :: [(ComputedFieldMeta, ComputedFieldMeta)]
|
||||
, _cfdOverloaded :: [(ComputedFieldName, QualifiedFunction)]
|
||||
} deriving (Show, Eq)
|
||||
|
||||
data TableDiff
|
||||
= TableDiff
|
||||
{ _tdNewName :: !(Maybe QualifiedTable)
|
||||
@ -92,6 +117,7 @@ data TableDiff
|
||||
, _tdAddedCols :: ![PGRawColumnInfo]
|
||||
, _tdAlteredCols :: ![(PGRawColumnInfo, PGRawColumnInfo)]
|
||||
, _tdDroppedFKeyCons :: ![ConstraintName]
|
||||
, _tdComputedFields :: !ComputedFieldDiff
|
||||
-- The final list of uniq/primary constraint names
|
||||
-- used for generating types on_conflict clauses
|
||||
-- TODO: this ideally shouldn't be part of TableDiff
|
||||
@ -102,7 +128,7 @@ data TableDiff
|
||||
getTableDiff :: TableMeta -> TableMeta -> TableDiff
|
||||
getTableDiff oldtm newtm =
|
||||
TableDiff mNewName droppedCols addedCols alteredCols
|
||||
droppedFKeyConstraints uniqueOrPrimaryCons mNewDesc
|
||||
droppedFKeyConstraints computedColDiff uniqueOrPrimaryCons mNewDesc
|
||||
where
|
||||
mNewName = bool (Just $ tmTable newtm) Nothing $ tmTable oldtm == tmTable newtm
|
||||
oldCols = tmColumns oldtm
|
||||
@ -140,6 +166,26 @@ getTableDiff oldtm newtm =
|
||||
|
||||
mkFKeyUniqId (ForeignKey _ reftn _ _ colMap) = (reftn, colMap)
|
||||
|
||||
-- calculate computed field diff
|
||||
oldComputedFieldMeta = tmComputedFields oldtm
|
||||
newComputedFieldMeta = tmComputedFields newtm
|
||||
|
||||
droppedComputedFields = map ccmName $
|
||||
getDifference (fmOid . ccmFunctionMeta) oldComputedFieldMeta newComputedFieldMeta
|
||||
|
||||
alteredComputedFields =
|
||||
getOverlap (fmOid . ccmFunctionMeta) oldComputedFieldMeta newComputedFieldMeta
|
||||
|
||||
overloadedComputedFieldFunctions =
|
||||
let getFunction = fmFunction . ccmFunctionMeta
|
||||
getSecondElement (_ NE.:| list) = listToMaybe list
|
||||
in mapMaybe (fmap ((&&&) ccmName getFunction) . getSecondElement) $
|
||||
flip NE.groupBy newComputedFieldMeta $ \l r ->
|
||||
ccmName l == ccmName r && getFunction l == getFunction r
|
||||
|
||||
computedColDiff = ComputedFieldDiff droppedComputedFields alteredComputedFields
|
||||
overloadedComputedFieldFunctions
|
||||
|
||||
getTableChangeDeps
|
||||
:: (QErrM m, CacheRWM m)
|
||||
=> TableInfo PGColumnInfo -> TableDiff -> m [SchemaObjId]
|
||||
@ -153,10 +199,11 @@ getTableChangeDeps ti tableDiff = do
|
||||
droppedConsDeps <- fmap concat $ forM droppedFKeyConstraints $ \droppedCons -> do
|
||||
let objId = SOTableObj tn $ TOCons droppedCons
|
||||
return $ getDependentObjs sc objId
|
||||
return $ droppedConsDeps <> droppedColDeps
|
||||
return $ droppedConsDeps <> droppedColDeps <> droppedComputedFieldDeps
|
||||
where
|
||||
tn = _tiName ti
|
||||
TableDiff _ droppedCols _ _ droppedFKeyConstraints _ _ = tableDiff
|
||||
TableDiff _ droppedCols _ _ droppedFKeyConstraints computedColDiff _ _ = tableDiff
|
||||
droppedComputedFieldDeps = map (SOTableObj tn . TOComputedField) $ _cfdDropped computedColDiff
|
||||
|
||||
data SchemaDiff
|
||||
= SchemaDiff
|
||||
@ -195,40 +242,20 @@ getSchemaChangeDeps schemaDiff = do
|
||||
isDirectDep (SOTableObj tn _) = tn `HS.member` HS.fromList droppedTables
|
||||
isDirectDep _ = False
|
||||
|
||||
data FunctionMeta
|
||||
= FunctionMeta
|
||||
{ fmOid :: !Int
|
||||
, fmSchema :: !SchemaName
|
||||
, fmName :: !FunctionName
|
||||
, fmType :: !FunctionType
|
||||
, fmDescription :: !(Maybe PGDescription)
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 2 snakeCase) ''FunctionMeta)
|
||||
|
||||
funcFromMeta :: FunctionMeta -> QualifiedFunction
|
||||
funcFromMeta fm = QualifiedObject (fmSchema fm) (fmName fm)
|
||||
|
||||
fetchFunctionMeta :: Q.Tx [FunctionMeta]
|
||||
fetchFunctionMeta =
|
||||
map (Q.getAltJ . runIdentity) <$> Q.listQ [Q.sql|
|
||||
SELECT
|
||||
json_build_object(
|
||||
'oid', p.oid :: integer,
|
||||
'schema', f.function_schema,
|
||||
'name', f.function_name,
|
||||
'oid', f.function_oid,
|
||||
'function', json_build_object('name', f.function_name, 'schema', f.function_schema),
|
||||
'type', f.function_type,
|
||||
'description', f.description
|
||||
) AS function_meta
|
||||
FROM
|
||||
hdb_catalog.hdb_function_agg f
|
||||
JOIN pg_catalog.pg_proc p ON (p.proname = f.function_name)
|
||||
JOIN pg_catalog.pg_namespace pn ON (
|
||||
pn.oid = p.pronamespace
|
||||
AND pn.nspname = f.function_schema
|
||||
)
|
||||
WHERE
|
||||
f.function_schema <> 'hdb_catalog'
|
||||
GROUP BY p.oid, f.function_schema, f.function_name, f.function_type, f.description
|
||||
|] () False
|
||||
|
||||
data FunctionDiff
|
||||
@ -241,18 +268,18 @@ getFuncDiff :: [FunctionMeta] -> [FunctionMeta] -> FunctionDiff
|
||||
getFuncDiff oldMeta newMeta =
|
||||
FunctionDiff droppedFuncs alteredFuncs
|
||||
where
|
||||
droppedFuncs = map funcFromMeta $ getDifference fmOid oldMeta newMeta
|
||||
droppedFuncs = map fmFunction $ getDifference fmOid oldMeta newMeta
|
||||
alteredFuncs = mapMaybe mkAltered $ getOverlap fmOid oldMeta newMeta
|
||||
mkAltered (oldfm, newfm) =
|
||||
let isTypeAltered = fmType oldfm /= fmType newfm
|
||||
isDescriptionAltered = fmDescription oldfm /= fmDescription newfm
|
||||
alteredFunc = (funcFromMeta oldfm, fmType newfm, fmDescription newfm)
|
||||
alteredFunc = (fmFunction oldfm, fmType newfm, fmDescription newfm)
|
||||
in bool Nothing (Just alteredFunc) $ isTypeAltered || isDescriptionAltered
|
||||
|
||||
getOverloadedFuncs
|
||||
:: [QualifiedFunction] -> [FunctionMeta] -> [QualifiedFunction]
|
||||
getOverloadedFuncs trackedFuncs newFuncMeta =
|
||||
duplicates $ map funcFromMeta trackedMeta
|
||||
duplicates $ map fmFunction trackedMeta
|
||||
where
|
||||
trackedMeta = flip filter newFuncMeta $ \fm ->
|
||||
funcFromMeta fm `elem` trackedFuncs
|
||||
fmFunction fm `elem` trackedFuncs
|
||||
|
@ -20,41 +20,29 @@ import qualified Language.GraphQL.Draft.Syntax as G
|
||||
|
||||
import qualified Data.HashMap.Strict as M
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.Text as T
|
||||
import qualified Database.PG.Query as Q
|
||||
|
||||
|
||||
data PGTypType
|
||||
= PTBASE
|
||||
| PTCOMPOSITE
|
||||
| PTDOMAIN
|
||||
| PTENUM
|
||||
| PTRANGE
|
||||
| PTPSEUDO
|
||||
deriving (Show, Eq)
|
||||
$(deriveJSON defaultOptions{constructorTagModifier = drop 2} ''PGTypType)
|
||||
|
||||
data RawFuncInfo
|
||||
= RawFuncInfo
|
||||
data RawFunctionInfo
|
||||
= RawFunctionInfo
|
||||
{ rfiHasVariadic :: !Bool
|
||||
, rfiFunctionType :: !FunctionType
|
||||
, rfiReturnTypeSchema :: !SchemaName
|
||||
, rfiReturnTypeName :: !T.Text
|
||||
, rfiReturnTypeType :: !PGTypType
|
||||
, rfiReturnTypeName :: !PGScalarType
|
||||
, rfiReturnTypeType :: !PGTypeKind
|
||||
, rfiReturnsSet :: !Bool
|
||||
, rfiInputArgTypes :: ![PGScalarType]
|
||||
, rfiInputArgNames :: ![T.Text]
|
||||
, rfiInputArgTypes :: ![QualifiedPGType]
|
||||
, rfiInputArgNames :: ![FunctionArgName]
|
||||
, rfiDefaultArgs :: !Int
|
||||
, rfiReturnsTable :: !Bool
|
||||
, rfiDescription :: !(Maybe PGDescription)
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''RawFuncInfo)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''RawFunctionInfo)
|
||||
|
||||
mkFunctionArgs :: Int -> [PGScalarType] -> [T.Text] -> [FunctionArg]
|
||||
mkFunctionArgs :: Int -> [QualifiedPGType] -> [FunctionArgName] -> [FunctionArg]
|
||||
mkFunctionArgs defArgsNo tys argNames =
|
||||
bool withNames withNoNames $ null argNames
|
||||
where
|
||||
hasDefaultBoolSeq = replicate (length argNames - defArgsNo) False
|
||||
hasDefaultBoolSeq = replicate (length tys - defArgsNo) False
|
||||
-- only the last arguments can have default expressions
|
||||
<> replicate defArgsNo True
|
||||
|
||||
@ -65,7 +53,7 @@ mkFunctionArgs defArgsNo tys argNames =
|
||||
withNames = zipWith mkArg argNames tysWithHasDefault
|
||||
|
||||
mkArg "" (ty, hasDef) = FunctionArg Nothing ty hasDef
|
||||
mkArg n (ty, hasDef) = FunctionArg (Just $ FunctionArgName n) ty hasDef
|
||||
mkArg n (ty, hasDef) = FunctionArg (Just n) ty hasDef
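For illustration (not part of this diff): a small standalone sketch of how mkFunctionArgs turns the default-argument count into per-argument flags — only the trailing defArgsNo arguments are marked as having a default, since postgres only permits defaults on trailing parameters. Names and types below are simplified stand-ins.

    module Main where

    -- Pair each argument type with a has-default flag; postgres only allows
    -- the trailing arguments of a function to carry defaults.
    markDefaults :: Int -> [String] -> [(String, Bool)]
    markDefaults defArgsNo tys = zip tys flags
      where
        flags = replicate (length tys - defArgsNo) False <> replicate defArgsNo True

    main :: IO ()
    main =
      -- e.g. f(a integer, b integer, c integer DEFAULT 0)
      print (markDefaults 1 ["integer", "integer", "integer"])
      -- [("integer",False),("integer",False),("integer",True)]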
|
||||
|
||||
validateFuncArgs :: MonadError QErr m => [FunctionArg] -> m ()
|
||||
validateFuncArgs args =
|
||||
@ -77,12 +65,12 @@ validateFuncArgs args =
|
||||
invalidArgs = filter (not . G.isValidName) $ map G.Name funcArgsText
|
||||
|
||||
mkFunctionInfo
|
||||
:: (QErrM m, HasSystemDefined m) => QualifiedFunction -> RawFuncInfo -> m FunctionInfo
|
||||
:: (QErrM m, HasSystemDefined m) => QualifiedFunction -> RawFunctionInfo -> m FunctionInfo
|
||||
mkFunctionInfo qf rawFuncInfo = do
|
||||
-- throw error if function has variadic arguments
|
||||
when hasVariadic $ throw400 NotSupported "function with \"VARIADIC\" parameters are not supported"
|
||||
-- throw error if return type is not composite type
|
||||
when (retTyTyp /= PTCOMPOSITE) $ throw400 NotSupported "function does not return a \"COMPOSITE\" type"
|
||||
when (retTyType /= PGKindComposite) $ throw400 NotSupported "function does not return a \"COMPOSITE\" type"
|
||||
-- throw error if function does not return SETOF
|
||||
unless retSet $ throw400 NotSupported "function does not return a SETOF"
|
||||
-- throw error if return type is not a valid table
|
||||
@ -96,12 +84,13 @@ mkFunctionInfo qf rawFuncInfo = do
|
||||
systemDefined <- askSystemDefined
|
||||
let funcArgsSeq = Seq.fromList funcArgs
|
||||
dep = SchemaDependency (SOTable retTable) DRTable
|
||||
retTable = QualifiedObject retSn (TableName retN)
|
||||
retTable = typeToTable returnType
|
||||
return $ FunctionInfo qf systemDefined funTy funcArgsSeq retTable [dep] descM
|
||||
where
|
||||
RawFuncInfo hasVariadic funTy retSn retN retTyTyp retSet
|
||||
RawFunctionInfo hasVariadic funTy rtSN retN retTyType retSet
|
||||
inpArgTyps inpArgNames defArgsNo returnsTab descM
|
||||
= rawFuncInfo
|
||||
returnType = QualifiedPGType rtSN retN retTyType
|
||||
|
||||
saveFunctionToCatalog :: QualifiedFunction -> SystemDefined -> Q.TxE QErr ()
|
||||
saveFunctionToCatalog (QualifiedObject sn fn) systemDefined =
|
||||
@ -137,7 +126,7 @@ trackFunctionP1 (TrackFunction qf) = do
|
||||
throw400 NotSupported $ "table with name " <> qf <<> " already exists"
|
||||
|
||||
trackFunctionP2Setup :: (QErrM m, CacheRWM m, HasSystemDefined m, MonadTx m)
|
||||
=> QualifiedFunction -> RawFuncInfo -> m ()
|
||||
=> QualifiedFunction -> RawFunctionInfo -> m ()
|
||||
trackFunctionP2Setup qf rawfi = do
|
||||
fi <- mkFunctionInfo qf rawfi
|
||||
let retTable = fiReturnType fi
|
||||
@ -159,27 +148,32 @@ trackFunctionP2 qf = do
|
||||
GS.checkConflictingNode defGCtx funcNameGQL
|
||||
|
||||
-- fetch function info
|
||||
functionInfos <- liftTx fetchFuncDets
|
||||
rawfi <- case functionInfos of
|
||||
[] ->
|
||||
throw400 NotExists $ "no such function exists in postgres : " <>> qf
|
||||
[rawfi] -> return rawfi
|
||||
_ ->
|
||||
throw400 NotSupported $
|
||||
"function " <> qf <<> " is overloaded. Overloaded functions are not supported"
|
||||
rawfi <- fetchRawFunctioInfo qf
|
||||
trackFunctionP2Setup qf rawfi
|
||||
systemDefined <- askSystemDefined
|
||||
liftTx $ saveFunctionToCatalog qf systemDefined
|
||||
return successMsg
|
||||
|
||||
handleMultipleFunctions :: (QErrM m) => QualifiedFunction -> [a] -> m a
|
||||
handleMultipleFunctions qf = \case
|
||||
[] ->
|
||||
throw400 NotExists $ "no such function exists in postgres : " <>> qf
|
||||
[fi] -> return fi
|
||||
_ ->
|
||||
throw400 NotSupported $
|
||||
"function " <> qf <<> " is overloaded. Overloaded functions are not supported"
|
||||
|
||||
fetchRawFunctioInfo :: MonadTx m => QualifiedFunction -> m RawFunctionInfo
|
||||
fetchRawFunctioInfo qf@(QualifiedObject sn fn) = do
|
||||
handleMultipleFunctions qf =<< map (Q.getAltJ . runIdentity) <$> fetchFromDatabase
|
||||
where
|
||||
QualifiedObject sn fn = qf
|
||||
fetchFuncDets = map (Q.getAltJ . runIdentity) <$>
|
||||
fetchFromDatabase = liftTx $
|
||||
Q.listQE defaultTxErrorHandler [Q.sql|
|
||||
SELECT function_info
|
||||
FROM hdb_catalog.hdb_function_info_agg
|
||||
WHERE function_schema = $1
|
||||
AND function_name = $2
|
||||
|] (sn, fn) True
|
||||
SELECT function_info
|
||||
FROM hdb_catalog.hdb_function_info_agg
|
||||
WHERE function_schema = $1
|
||||
AND function_name = $2
|
||||
|] (sn, fn) True
|
||||
|
||||
runTrackFunc
|
||||
:: ( QErrM m, CacheRWM m, HasSystemDefined m
|
||||
|
@ -215,15 +215,15 @@ updateInsPermFlds refQT rename rn (InsPerm chk preset cols) = do
|
||||
updateSelPermFlds
|
||||
:: (MonadTx m, CacheRM m)
|
||||
=> QualifiedTable -> Rename -> RoleName -> SelPerm -> m ()
|
||||
updateSelPermFlds refQT rename rn (SelPerm cols fltr limit aggAllwd) = do
|
||||
updateSelPermFlds refQT rename rn (SelPerm cols fltr limit aggAllwd computedCols) = do
|
||||
updatedPerm <- case rename of
|
||||
RTable rt -> do
|
||||
let updFltr = updateTableInBoolExp rt fltr
|
||||
return $ SelPerm cols updFltr limit aggAllwd
|
||||
return $ SelPerm cols updFltr limit aggAllwd computedCols
|
||||
RField rf -> do
|
||||
updFltr <- updateFieldInBoolExp refQT rf fltr
|
||||
let updCols = updateCols refQT rf cols
|
||||
return $ SelPerm updCols updFltr limit aggAllwd
|
||||
return $ SelPerm updCols updFltr limit aggAllwd computedCols
|
||||
liftTx $ updatePermDefInCatalog PTSelect refQT rn updatedPerm
|
||||
|
||||
updateUpdPermFlds
|
||||
@ -316,8 +316,9 @@ updateColExp qt rf (ColExp fld val) =
|
||||
fim <- askFieldInfoMap qt
|
||||
fi <- askFieldInfo fim fld
|
||||
case fi of
|
||||
FIColumn _ -> return val
|
||||
FIRelationship ri -> do
|
||||
FIColumn _ -> return val
|
||||
FIComputedField _ -> return val
|
||||
FIRelationship ri -> do
|
||||
let remTable = riRTable ri
|
||||
be <- decodeValue val
|
||||
ube <- updateFieldInBoolExp remTable rf be
|
||||
|
@ -257,12 +257,16 @@ processTableChanges ti tableDiff = do
|
||||
renameTableInCatalog newTN tn
|
||||
return True
|
||||
|
||||
-- Process computed field diff
|
||||
processComputedFieldDiff tn
|
||||
-- Drop custom column names for dropped columns
|
||||
customColumnNames <- possiblyDropCustomColumnNames tn
|
||||
maybe (withOldTabName customColumnNames) (withNewTabName customColumnNames) mNewName
|
||||
|
||||
where
|
||||
TableDiff mNewName droppedCols addedCols alteredCols _ constraints descM = tableDiff
|
||||
TableDiff mNewName droppedCols addedCols alteredCols _
|
||||
computedColDiff constraints descM = tableDiff
|
||||
|
||||
replaceConstraints tn = flip modTableInCache tn $ \tInfo ->
|
||||
return $ tInfo {_tiUniqOrPrimConstraints = constraints}
|
||||
|
||||
@ -330,6 +334,24 @@ processTableChanges ti tableDiff = do
|
||||
|
||||
| otherwise -> performColumnUpdate $> False
|
||||
|
||||
processComputedFieldDiff table = do
|
||||
let ComputedFieldDiff _ altered overloaded = computedColDiff
|
||||
getFunction = fmFunction . ccmFunctionMeta
|
||||
getFunctionDescription = fmDescription . ccmFunctionMeta
|
||||
forM_ overloaded $ \(columnName, function) ->
|
||||
throw400 NotSupported $ "The function " <> function
|
||||
<<> " associated with computed field" <> columnName
|
||||
<<> " of table " <> table <<> " is being overloaded"
|
||||
forM_ altered $ \(old, new) ->
|
||||
if | (fmType . ccmFunctionMeta) new == FTVOLATILE ->
|
||||
throw400 NotSupported $ "The type of function " <> getFunction old
|
||||
<<> " associated with computed field " <> ccmName old
|
||||
<<> " of table " <> table <<> " is being altered to \"VOLATILE\""
|
||||
| getFunctionDescription old /= getFunctionDescription new ->
|
||||
updateComputedFieldFunctionDescription table (ccmName old)
|
||||
(getFunctionDescription new)
|
||||
| otherwise -> pure ()
|
||||
|
||||
delTableAndDirectDeps
|
||||
:: (QErrM m, CacheRWM m, MonadTx m) => QualifiedTable -> m ()
|
||||
delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
|
||||
@ -346,6 +368,10 @@ delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
|
||||
DELETE FROM "hdb_catalog"."event_triggers"
|
||||
WHERE schema_name = $1 AND table_name = $2
|
||||
|] (sn, tn) False
|
||||
Q.unitQ [Q.sql|
|
||||
DELETE FROM "hdb_catalog"."hdb_computed_field"
|
||||
WHERE table_schema = $1 AND table_name = $2
|
||||
|] (sn, tn) False
|
||||
deleteTableFromCatalog qtn
|
||||
delTableFromCache qtn
|
||||
|
||||
|
@ -45,11 +45,13 @@ mkAdminRolePermInfo :: TableInfo PGColumnInfo -> RolePermInfo
|
||||
mkAdminRolePermInfo ti =
|
||||
RolePermInfo (Just i) (Just s) (Just u) (Just d)
|
||||
where
|
||||
pgCols = map pgiColumn $ getCols $ _tiFieldInfoMap ti
|
||||
fields = _tiFieldInfoMap ti
|
||||
pgCols = map pgiColumn $ getCols fields
|
||||
computedCols = map _cfiName $ getComputedFieldInfos fields
|
||||
|
||||
tn = _tiName ti
|
||||
i = InsPermInfo (HS.fromList pgCols) tn annBoolExpTrue M.empty []
|
||||
s = SelPermInfo (HS.fromList pgCols) tn annBoolExpTrue
|
||||
s = SelPermInfo (HS.fromList pgCols) (HS.fromList computedCols) tn annBoolExpTrue
|
||||
Nothing True []
|
||||
u = UpdPermInfo (HS.fromList pgCols) tn annBoolExpTrue M.empty []
|
||||
d = DelPermInfo tn annBoolExpTrue []
|
||||
|
@ -67,7 +67,7 @@ mutateAndFetchCols qt cols (cte, p) strfyNum =
|
||||
<$> Q.rawQE dmlTxErrorHandler (Q.fromBuilder sql) (toList p) True
|
||||
where
|
||||
aliasIden = Iden $ qualObjectToText qt <> "__mutation_result"
|
||||
tabFrom = TableFrom qt $ Just aliasIden
|
||||
tabFrom = FromIden aliasIden
|
||||
tabPerm = TablePerm annBoolExpTrue Nothing
|
||||
selFlds = flip map cols $
|
||||
\ci -> (fromPGCol $ pgiColumn ci, FCol ci Nothing)
|
||||
|
@ -88,7 +88,7 @@ mkMutFldExp qt singleObj strfyNum = \case
|
||||
MExp t -> S.SELit t
|
||||
MRet selFlds ->
|
||||
-- let tabFrom = TableFrom qt $ Just frmItem
|
||||
let tabFrom = TableFrom qt $ Just $ qualTableToAliasIden qt
|
||||
let tabFrom = FromIden $ qualTableToAliasIden qt
|
||||
tabPerm = TablePerm annBoolExpTrue Nothing
|
||||
in S.SESelect $ mkSQLSelect singleObj $
|
||||
AnnSelG selFlds tabFrom tabPerm noTableArgs strfyNum
|
||||
|
@ -14,7 +14,6 @@ where
|
||||
import Data.Aeson.Types
|
||||
import Instances.TH.Lift ()
|
||||
|
||||
import qualified Data.HashMap.Strict as HM
|
||||
import qualified Data.HashSet as HS
|
||||
import qualified Data.List.NonEmpty as NE
|
||||
import qualified Data.Sequence as DS
|
||||
@ -54,13 +53,14 @@ convWildcard
|
||||
-> SelPermInfo
|
||||
-> Wildcard
|
||||
-> m [ExtCol]
|
||||
convWildcard fieldInfoMap (SelPermInfo cols _ _ _ _ _) wildcard =
|
||||
convWildcard fieldInfoMap selPermInfo wildcard =
|
||||
case wildcard of
|
||||
Star -> return simpleCols
|
||||
(StarDot wc) -> (simpleCols ++) <$> (catMaybes <$> relExtCols wc)
|
||||
where
|
||||
(pgCols, relColInfos) = partitionFieldInfosWith (pgiColumn, id) $
|
||||
HM.elems fieldInfoMap
|
||||
cols = spiCols selPermInfo
|
||||
pgCols = map pgiColumn $ getCols fieldInfoMap
|
||||
relColInfos = getRels fieldInfoMap
|
||||
|
||||
simpleCols = map ECSimple $ filter (`HS.member` cols) pgCols
|
||||
|
||||
@ -125,6 +125,10 @@ convOrderByElem sessVarBldr (flds, spi) = \case
|
||||
[ fldName <<> " is a"
|
||||
, " relationship and should be expanded"
|
||||
]
|
||||
FIComputedField _ -> throw400 UnexpectedPayload $ mconcat
|
||||
[ fldName <<> " is a"
|
||||
, " computed field and can't be used in 'order_by'"
|
||||
]
|
||||
OCRel fldName rest -> do
|
||||
fldInfo <- askFieldInfo flds fldName
|
||||
case fldInfo of
|
||||
@ -132,6 +136,10 @@ convOrderByElem sessVarBldr (flds, spi) = \case
|
||||
[ fldName <<> " is a Postgres column"
|
||||
, " and cannot be chained further"
|
||||
]
|
||||
FIComputedField _ -> throw400 UnexpectedPayload $ mconcat
|
||||
[ fldName <<> " is a"
|
||||
, " computed field and can't be used in 'order_by'"
|
||||
]
|
||||
FIRelationship relInfo -> do
|
||||
when (riType relInfo == ArrRel) $
|
||||
throw400 UnexpectedPayload $ mconcat
|
||||
@ -185,7 +193,7 @@ convSelectQ fieldInfoMap selPermInfo selQ sessVarBldr prepValBldr = do
|
||||
resolvedSelFltr <- convAnnBoolExpPartialSQL sessVarBldr $
|
||||
spiFilter selPermInfo
|
||||
|
||||
let tabFrom = TableFrom (spiTable selPermInfo) Nothing
|
||||
let tabFrom = FromTable $ spiTable selPermInfo
|
||||
tabPerm = TablePerm resolvedSelFltr mPermLimit
|
||||
tabArgs = TableArgs wClause annOrdByM mQueryLimit
|
||||
(S.intToSQLExp <$> mQueryOffset) Nothing
|
||||
|
@ -6,6 +6,7 @@ module Hasura.RQL.DML.Select.Internal
|
||||
)
|
||||
where
|
||||
|
||||
import Control.Lens hiding (op)
|
||||
import Data.List (delete, sort)
|
||||
import Instances.TH.Lift ()
|
||||
|
||||
@ -18,7 +19,6 @@ import Hasura.RQL.DML.Internal
|
||||
import Hasura.RQL.DML.Select.Types
|
||||
import Hasura.RQL.GBoolExp
|
||||
import Hasura.RQL.Types
|
||||
import Hasura.Server.Utils
|
||||
import Hasura.SQL.Rewrite (prefixNumToAliases)
|
||||
import Hasura.SQL.Types
|
||||
|
||||
@ -28,15 +28,22 @@ import qualified Hasura.SQL.DML as S
|
||||
-- Stage 1 : Convert input query into an annotated AST
|
||||
-- Stage 2 : Convert annotated AST to SQL Select
|
||||
|
||||
tableFromToFromItem :: TableFrom -> S.FromItem
|
||||
tableFromToFromItem = \case
|
||||
TableFrom tn Nothing -> S.FISimple tn Nothing
|
||||
TableFrom _ (Just i) -> S.FIIden i
|
||||
functionToIden :: QualifiedFunction -> Iden
|
||||
functionToIden = Iden . qualObjectToText
|
||||
|
||||
tableFromToQual :: TableFrom -> S.Qual
|
||||
tableFromToQual = \case
|
||||
TableFrom tn Nothing -> S.QualTable tn
|
||||
TableFrom _ (Just i) -> S.QualIden i
|
||||
selFromToFromItem :: Iden -> SelectFrom -> S.FromItem
|
||||
selFromToFromItem pfx = \case
|
||||
FromTable tn -> S.FISimple tn Nothing
|
||||
FromIden i -> S.FIIden i
|
||||
FromFunction qf args ->
|
||||
S.FIFunc $ S.FunctionExp qf (fromTableRowArgs pfx args) $
|
||||
Just $ S.toAlias $ functionToIden qf
|
||||
|
||||
selFromToQual :: SelectFrom -> S.Qual
|
||||
selFromToQual = \case
|
||||
FromTable tn -> S.QualTable tn
|
||||
FromIden i -> S.QualIden i
|
||||
FromFunction qf _ -> S.QualIden $ functionToIden qf
|
||||
|
||||
aggFldToExp :: AggFlds -> S.SQLExp
|
||||
aggFldToExp aggFlds = jsonRow
|
||||
@ -157,6 +164,10 @@ mkObjRelTableAls :: Iden -> RelName -> Iden
|
||||
mkObjRelTableAls pfx relName =
|
||||
pfx <> Iden ".or." <> toIden relName
|
||||
|
||||
mkComputedFieldTableAls :: Iden -> FieldName -> Iden
|
||||
mkComputedFieldTableAls pfx fldAls =
|
||||
pfx <> Iden ".cf." <> toIden fldAls
|
||||
|
||||
mkBaseTableAls :: Iden -> Iden
|
||||
mkBaseTableAls pfx =
|
||||
pfx <> Iden ".base"
|
||||
@ -169,6 +180,15 @@ mkOrderByFieldName :: RelName -> FieldName
|
||||
mkOrderByFieldName relName =
|
||||
FieldName $ relNameToTxt relName <> "." <> "order_by"
|
||||
|
||||
fromTableRowArgs
|
||||
:: Iden -> FunctionArgsExpTableRow S.SQLExp -> S.FunctionArgs
|
||||
fromTableRowArgs pfx = toFunctionArgs . fmap toSQLExp
|
||||
where
|
||||
toFunctionArgs (FunctionArgsExp positional named) =
|
||||
S.FunctionArgs positional named
|
||||
toSQLExp AETableRow = S.SERowIden $ mkBaseTableAls pfx
|
||||
toSQLExp (AEInput s) = s
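For illustration (not part of this diff): a simplified stand-in (plain strings, not S.SQLExp) for how an argument of a computed-field function is rendered — AETableRow is assumed to become a row reference to the "<prefix>.base" alias of the generated query, while AEInput expressions pass through unchanged.

    module Main where

    -- Simplified stand-in for ArgumentExp: either the implicit table-row
    -- argument or a user-supplied SQL expression.
    data ArgExp
      = TableRow
      | Input String
      deriving (Show)

    -- Render one function argument, assuming the generated query aliases the
    -- base table as "<pfx>.base" and passes its whole row to the function.
    renderArg :: String -> ArgExp -> String
    renderArg pfx TableRow  = "\"" <> pfx <> ".base\".*"
    renderArg _   (Input s) = s

    main :: IO ()
    main = print (map (renderArg "_root") [TableRow, Input "'2019-01-01'"])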
|
||||
|
||||
-- postgres ignores anything beyond 63 chars for an iden
|
||||
-- in this case, we'll need to use json_build_object function
|
||||
-- json_build_object is slower than row_to_json hence it is only
|
||||
@ -200,16 +220,30 @@ buildJsonObject pfx parAls arrRelCtx strfyNum flds =
|
||||
in S.mkQIdenExp qual fldAls
|
||||
FArr arrSel ->
|
||||
let arrPfx = _aniPrefix $ mkArrNodeInfo pfx parAls arrRelCtx $
|
||||
ANIField (fldAls, arrSel)
|
||||
ANIField (fldAls, arrSel)
|
||||
in S.mkQIdenExp arrPfx fldAls
|
||||
FComputedField (CFSScalar computedFieldScalar) ->
|
||||
fromScalarComputedField computedFieldScalar
|
||||
FComputedField (CFSTable _) ->
|
||||
let ccPfx = mkComputedFieldTableAls pfx fldAls
|
||||
in S.mkQIdenExp ccPfx fldAls
|
||||
|
||||
toSQLCol :: PGColumnInfo -> Maybe ColOp -> S.SQLExp
|
||||
toSQLCol col colOpM =
|
||||
toJSONableExp strfyNum (pgiType col) $ case colOpM of
|
||||
Nothing -> colNameExp
|
||||
Just (ColOp op cExp) -> S.mkSQLOpExp op colNameExp cExp
|
||||
toJSONableExp strfyNum (pgiType col) $ withColOp colOpM $
|
||||
S.mkQIdenExp (mkBaseTableAls pfx) $ pgiColumn col
|
||||
|
||||
fromScalarComputedField :: ComputedFieldScalarSel S.SQLExp -> S.SQLExp
|
||||
fromScalarComputedField computedFieldScalar =
|
||||
toJSONableExp strfyNum (PGColumnScalar ty) $ withColOp colOpM $
|
||||
S.SEFunction $ S.FunctionExp fn (fromTableRowArgs pfx args) Nothing
|
||||
where
|
||||
colNameExp = S.mkQIdenExp (mkBaseTableAls pfx) $ pgiColumn col
|
||||
ComputedFieldScalarSel fn args ty colOpM = computedFieldScalar
|
||||
|
||||
withColOp :: Maybe ColOp -> S.SQLExp -> S.SQLExp
|
||||
withColOp colOpM sqlExp = case colOpM of
|
||||
Nothing -> sqlExp
|
||||
Just (ColOp op cExp) -> S.mkSQLOpExp op sqlExp cExp
|
||||
|
||||
-- uses row_to_json to build a json object
|
||||
withRowToJSON
|
||||
@ -309,6 +343,7 @@ processAnnOrderByCol pfx parAls arrRelCtx strfyNum = \case
|
||||
(HM.singleton nesAls nesCol)
|
||||
(maybe HM.empty (uncurry HM.singleton) objNodeM)
|
||||
(maybe HM.empty (uncurry HM.singleton) arrNodeM)
|
||||
HM.empty
|
||||
relNode = ObjNode colMapping relBaseNode
|
||||
in ( (nesAls, qualCol)
|
||||
, OBNObjNode rn relNode
|
||||
@ -318,11 +353,11 @@ processAnnOrderByCol pfx parAls arrRelCtx strfyNum = \case
|
||||
mkArrNodeInfo pfx parAls arrRelCtx $ ANIAggOrdBy rn
|
||||
fldName = mkAggObFld annAggOb
|
||||
qOrdBy = S.mkQIdenExp arrPfx $ toIden fldName
|
||||
tabFrom = TableFrom relTab Nothing
|
||||
tabFrom = FromTable relTab
|
||||
tabPerm = TablePerm relFltr Nothing
|
||||
(extr, arrFlds) = mkAggObExtrAndFlds annAggOb
|
||||
selFld = TAFAgg arrFlds
|
||||
bn = mkBaseNode False arrPfx fldName selFld tabFrom
|
||||
bn = mkBaseNode False (Prefixes arrPfx pfx) fldName selFld tabFrom
|
||||
tabPerm noTableArgs strfyNum
|
||||
aggNode = ArrNode [extr] colMapping $ mergeBaseNodes bn $
|
||||
mkEmptyBaseNode arrPfx tabFrom
|
||||
@ -347,16 +382,16 @@ processDistinctOnCol pfx neCols = (distOnExp, colExtrs)
|
||||
colExtrs = flip map cols $ mkQColAls &&& mkQCol
|
||||
|
||||
|
||||
mkEmptyBaseNode :: Iden -> TableFrom -> BaseNode
|
||||
mkEmptyBaseNode pfx tableFrom =
|
||||
mkEmptyBaseNode :: Iden -> SelectFrom -> BaseNode
|
||||
mkEmptyBaseNode pfx selectFrom =
|
||||
BaseNode pfx Nothing fromItem (S.BELit True) Nothing Nothing
|
||||
Nothing selOne HM.empty HM.empty
|
||||
Nothing selOne HM.empty HM.empty HM.empty
|
||||
where
|
||||
selOne = HM.singleton (S.Alias $ pfx <> Iden "__one") (S.SEUnsafe "1")
|
||||
fromItem = tableFromToFromItem tableFrom
|
||||
fromItem = selFromToFromItem pfx selectFrom
|
||||
|
||||
aggSelToArrNode :: Iden -> FieldName -> ArrRelAgg -> ArrNode
|
||||
aggSelToArrNode pfx als aggSel =
|
||||
aggSelToArrNode :: Prefixes -> FieldName -> ArrRelAgg -> ArrNode
|
||||
aggSelToArrNode pfxs als aggSel =
|
||||
ArrNode [extr] colMapping mergedBN
|
||||
where
|
||||
AnnRelG _ colMapping annSel = aggSel
|
||||
@ -370,11 +405,11 @@ aggSelToArrNode pfx als aggSel =
|
||||
ordBy = _bnOrderBy mergedBN
|
||||
|
||||
allBNs = map mkAggBaseNode aggFlds
|
||||
emptyBN = mkEmptyBaseNode pfx tabFrm
|
||||
emptyBN = mkEmptyBaseNode (_pfThis pfxs) tabFrm
|
||||
mergedBN = foldr mergeBaseNodes emptyBN allBNs
|
||||
|
||||
mkAggBaseNode (fn, selFld) =
|
||||
mkBaseNode subQueryReq pfx fn selFld tabFrm tabPerm tabArgs strfyNum
|
||||
mkBaseNode subQueryReq pfxs fn selFld tabFrm tabPerm tabArgs strfyNum
|
||||
|
||||
selFldToExtr (FieldName t, fld) = (:) (S.SELit t) $ pure $ case fld of
|
||||
TAFAgg flds -> aggFldToExp flds
|
||||
@ -468,13 +503,13 @@ mkOrdByItems pfx fldAls orderByM strfyNum arrRelCtx =
|
||||
procOrdByM =
|
||||
unzip3 . map procAnnOrdBy' . toList <$> orderByM
|
||||
|
||||
obExtrs = maybe [] _1 procOrdByM
|
||||
ordByExpM = S.OrderByExp . _2 <$> procOrdByM
|
||||
obExtrs = maybe [] (^. _1) procOrdByM
|
||||
ordByExpM = S.OrderByExp . (^. _2) <$> procOrdByM
|
||||
|
||||
ordByObjs = mapMaybe getOrdByRelNode $ maybe [] _3 procOrdByM
|
||||
ordByObjs = mapMaybe getOrdByRelNode $ maybe [] (^. _3) procOrdByM
|
||||
ordByObjsMap = HM.fromListWith mergeObjNodes ordByObjs
|
||||
|
||||
ordByAggArrs = mapMaybe getOrdByAggNode $ maybe [] _3 procOrdByM
|
||||
ordByAggArrs = mapMaybe getOrdByAggNode $ maybe [] (^. _3) procOrdByM
|
||||
ordByArrsMap = HM.fromListWith mergeArrNodes ordByAggArrs
|
||||
|
||||
getOrdByRelNode (OBNObjNode name node) = Just (name, node)
|
||||
@ -485,19 +520,20 @@ mkOrdByItems pfx fldAls orderByM strfyNum arrRelCtx =
|
||||
|
||||
mkBaseNode
|
||||
:: Bool
|
||||
-> Iden
|
||||
-> Prefixes
|
||||
-> FieldName
|
||||
-> TableAggFld
|
||||
-> TableFrom
|
||||
-> SelectFrom
|
||||
-> TablePerm
|
||||
-> TableArgs
|
||||
-> Bool
|
||||
-> BaseNode
|
||||
mkBaseNode subQueryReq pfx fldAls annSelFlds tableFrom
|
||||
mkBaseNode subQueryReq pfxs fldAls annSelFlds selectFrom
|
||||
tablePerm tableArgs strfyNum =
|
||||
BaseNode pfx distExprM fromItem finalWhere ordByExpM finalLimit offsetM
|
||||
allExtrs allObjsWithOb allArrsWithOb
|
||||
BaseNode thisPfx distExprM fromItem finalWhere ordByExpM finalLimit offsetM
|
||||
allExtrs allObjsWithOb allArrsWithOb computedFields
|
||||
where
|
||||
Prefixes thisPfx baseTablepfx = pfxs
|
||||
TablePerm permFilter permLimit = tablePerm
|
||||
TableArgs whereM orderByM inpLimitM offsetM distM = tableArgs
|
||||
|
||||
@ -517,18 +553,21 @@ mkBaseNode subQueryReq pfx fldAls annSelFlds tableFrom
|
||||
|
||||
aggOrdByRelNames = fetchOrdByAggRels orderByM
|
||||
|
||||
(allExtrs, allObjsWithOb, allArrsWithOb, ordByExpM) =
|
||||
(allExtrs, allObjsWithOb, allArrsWithOb, computedFields, ordByExpM) =
|
||||
case annSelFlds of
|
||||
TAFNodes flds ->
|
||||
let arrFlds = mapMaybe getAnnArr flds
|
||||
arrRelCtx = mkArrRelCtx arrFlds
|
||||
selExtr = buildJsonObject pfx fldAls arrRelCtx strfyNum flds
|
||||
selExtr = buildJsonObject thisPfx fldAls arrRelCtx strfyNum flds
|
||||
-- all object relationships
|
||||
objNodes = HM.fromListWith mergeObjNodes $
|
||||
map mkObjItem (mapMaybe getAnnObj flds)
|
||||
-- all array items (array relationships + aggregates)
|
||||
arrNodes = HM.fromListWith mergeArrNodes $
|
||||
map (mkArrItem arrRelCtx) arrFlds
|
||||
-- all computed fields with table returns
|
||||
computedFieldNodes = HM.fromList $ map mkComputedFieldTable $
|
||||
mapMaybe getComputedFieldTable flds
|
||||
|
||||
(obExtrs, ordByObjs, ordByArrs, obeM)
|
||||
= mkOrdByItems' arrRelCtx
|
||||
@ -538,6 +577,7 @@ mkBaseNode subQueryReq pfx fldAls annSelFlds tableFrom
|
||||
in ( HM.fromList $ selExtr:obExtrs <> distExtrs
|
||||
, allObjs
|
||||
, allArrs
|
||||
, computedFieldNodes
|
||||
, obeM
|
||||
)
|
||||
TAFAgg tabAggs ->
|
||||
@ -547,12 +587,13 @@ mkBaseNode subQueryReq pfx fldAls annSelFlds tableFrom
|
||||
in ( HM.fromList $ extrs <> obExtrs <> distExtrs
|
||||
, ordByObjs
|
||||
, ordByArrs
|
||||
, HM.empty
|
||||
, obeM
|
||||
)
|
||||
TAFExp _ ->
|
||||
let (obExtrs, ordByObjs, ordByArrs, obeM)
|
||||
= mkOrdByItems' emptyArrRelCtx
|
||||
in (HM.fromList obExtrs, ordByObjs, ordByArrs, obeM)
|
||||
in (HM.fromList obExtrs, ordByObjs, ordByArrs, HM.empty, obeM)
|
||||
|
||||
fetchExtrFromAggFld (AFCount cty) = countTyToExps cty
|
||||
fetchExtrFromAggFld (AFOp aggOp) = aggOpToExps aggOp
|
||||
@ -567,38 +608,44 @@ mkBaseNode subQueryReq pfx fldAls annSelFlds tableFrom
|
||||
aggOpToExps = mapMaybe (mkColExp . snd) . _aoFlds
|
||||
|
||||
mkColExp (PCFCol c) =
|
||||
let qualCol = S.mkQIdenExp (mkBaseTableAls pfx) (toIden c)
|
||||
let qualCol = S.mkQIdenExp (mkBaseTableAls thisPfx) (toIden c)
|
||||
colAls = toIden c
|
||||
in Just (S.Alias colAls, qualCol)
|
||||
mkColExp _ = Nothing
|
||||
|
||||
finalWhere = toSQLBoolExp tableQual $
|
||||
maybe permFilter (andAnnBoolExps permFilter) whereM
|
||||
fromItem = tableFromToFromItem tableFrom
|
||||
tableQual = tableFromToQual tableFrom
|
||||
fromItem = selFromToFromItem baseTablepfx selectFrom
|
||||
tableQual = selFromToQual selectFrom
|
||||
|
||||
mkArrRelCtx arrSels = ArrRelCtx arrSels aggOrdByRelNames
|
||||
|
||||
mkOrdByItems' = mkOrdByItems pfx fldAls orderByM strfyNum
|
||||
mkOrdByItems' = mkOrdByItems thisPfx fldAls orderByM strfyNum
|
||||
|
||||
distItemsM = processDistinctOnCol pfx <$> distM
|
||||
distItemsM = processDistinctOnCol thisPfx <$> distM
|
||||
distExprM = fst <$> distItemsM
|
||||
distExtrs = fromMaybe [] (snd <$> distItemsM)
|
||||
|
||||
-- process an object relationship
|
||||
mkObjItem (fld, objSel) =
|
||||
let relName = aarName objSel
|
||||
objNodePfx = mkObjRelTableAls pfx $ aarName objSel
|
||||
objNode = mkObjNode objNodePfx (fld, objSel)
|
||||
objNodePfx = mkObjRelTableAls thisPfx $ aarName objSel
|
||||
objNode = mkObjNode (Prefixes objNodePfx thisPfx) (fld, objSel)
|
||||
in (relName, objNode)
|
||||
|
||||
-- process an array/array-aggregate item
|
||||
mkArrItem arrRelCtx (fld, arrSel) =
|
||||
let ArrNodeInfo arrAls arrPfx subQReq =
|
||||
mkArrNodeInfo pfx fldAls arrRelCtx $ ANIField (fld, arrSel)
|
||||
arrNode = mkArrNode subQReq arrPfx (fld, arrSel)
|
||||
mkArrNodeInfo thisPfx fldAls arrRelCtx $ ANIField (fld, arrSel)
|
||||
arrNode = mkArrNode subQReq (Prefixes arrPfx thisPfx) (fld, arrSel)
|
||||
in (arrAls, arrNode)
|
||||
|
||||
-- process a computed field, which returns a table
|
||||
mkComputedFieldTable (fld, sel) =
|
||||
let prefixes = Prefixes (mkComputedFieldTableAls thisPfx fld) thisPfx
|
||||
baseNode = annSelToBaseNode False prefixes fld sel
|
||||
in (fld, baseNode)
|
||||
|
||||
getAnnObj (f, annFld) = case annFld of
|
||||
FObj ob -> Just (f, ob)
|
||||
_ -> Nothing
|
||||
@ -607,26 +654,30 @@ mkBaseNode subQueryReq pfx fldAls annSelFlds tableFrom
|
||||
FArr ar -> Just (f, ar)
|
||||
_ -> Nothing
|
||||
|
||||
annSelToBaseNode :: Bool -> Iden -> FieldName -> AnnSimpleSel -> BaseNode
|
||||
annSelToBaseNode subQueryReq pfx fldAls annSel =
|
||||
mkBaseNode subQueryReq pfx fldAls (TAFNodes selFlds) tabFrm tabPerm tabArgs strfyNum
|
||||
getComputedFieldTable (f, annFld) = case annFld of
|
||||
FComputedField (CFSTable sel) -> Just (f, sel)
|
||||
_ -> Nothing
|
||||
|
||||
annSelToBaseNode :: Bool -> Prefixes -> FieldName -> AnnSimpleSel -> BaseNode
|
||||
annSelToBaseNode subQueryReq pfxs fldAls annSel =
|
||||
mkBaseNode subQueryReq pfxs fldAls (TAFNodes selFlds) tabFrm tabPerm tabArgs strfyNum
|
||||
where
|
||||
AnnSelG selFlds tabFrm tabPerm tabArgs strfyNum = annSel
|
||||
|
||||
mkObjNode :: Iden -> (FieldName, ObjSel) -> ObjNode
|
||||
mkObjNode pfx (fldName, AnnRelG _ rMapn rAnnSel) =
|
||||
ObjNode rMapn $ annSelToBaseNode False pfx fldName rAnnSel
|
||||
mkObjNode :: Prefixes -> (FieldName, ObjSel) -> ObjNode
|
||||
mkObjNode pfxs (fldName, AnnRelG _ rMapn rAnnSel) =
|
||||
ObjNode rMapn $ annSelToBaseNode False pfxs fldName rAnnSel
|
||||
|
||||
mkArrNode :: Bool -> Iden -> (FieldName, ArrSel) -> ArrNode
|
||||
mkArrNode subQueryReq pfx (fldName, annArrSel) = case annArrSel of
|
||||
mkArrNode :: Bool -> Prefixes -> (FieldName, ArrSel) -> ArrNode
|
||||
mkArrNode subQueryReq pfxs (fldName, annArrSel) = case annArrSel of
|
||||
ASSimple annArrRel ->
|
||||
let bn = annSelToBaseNode subQueryReq pfx fldName $ aarAnnSel annArrRel
|
||||
let bn = annSelToBaseNode subQueryReq pfxs fldName $ aarAnnSel annArrRel
|
||||
permLimit = getPermLimit $ aarAnnSel annArrRel
|
||||
extr = asJsonAggExtr False (S.toAlias fldName) subQueryReq permLimit $
|
||||
_bnOrderBy bn
|
||||
in ArrNode [extr] (aarMapping annArrRel) bn
|
||||
|
||||
ASAgg annAggSel -> aggSelToArrNode pfx fldName annAggSel
|
||||
ASAgg annAggSel -> aggSelToArrNode pfxs fldName annAggSel
|
||||
|
||||
injectJoinCond :: S.BoolExp -- ^ Join condition
|
||||
-> S.BoolExp -- ^ Where condition
|
||||
@ -635,10 +686,10 @@ injectJoinCond joinCond whereCond =
|
||||
S.WhereFrag $ S.simplifyBoolExp $ S.BEBin S.AndOp joinCond whereCond
|
||||
|
||||
mkJoinCond :: S.Alias -> [(PGCol, PGCol)] -> S.BoolExp
|
||||
mkJoinCond baseTableAls colMapn =
|
||||
mkJoinCond baseTablepfx colMapn =
|
||||
foldl' (S.BEBin S.AndOp) (S.BELit True) $ flip map colMapn $
|
||||
\(lCol, rCol) ->
|
||||
S.BECompare S.SEQ (S.mkQIdenExp baseTableAls lCol) (S.mkSIdenExp rCol)
|
||||
S.BECompare S.SEQ (S.mkQIdenExp baseTablepfx lCol) (S.mkSIdenExp rCol)
|
||||
|
||||
baseNodeToSel :: S.BoolExp -> BaseNode -> S.Select
|
||||
baseNodeToSel joinCond baseNode =
|
||||
@ -652,7 +703,7 @@ baseNodeToSel joinCond baseNode =
|
||||
}
|
||||
where
|
||||
BaseNode pfx dExp fromItem whr ordByM limitM
|
||||
offsetM extrs objRels arrRels
|
||||
offsetM extrs objRels arrRels computedFields
|
||||
= baseNode
|
||||
-- this is the table which is aliased as "pfx.base"
|
||||
baseSel = S.mkSelect
|
||||
@ -672,7 +723,8 @@ baseNodeToSel joinCond baseNode =
|
||||
joinedFrom :: S.FromItem
|
||||
joinedFrom = foldl' leftOuterJoin baseFromItem $
|
||||
map objNodeToFromItem (HM.elems objRels) <>
|
||||
map arrNodeToFromItem (HM.elems arrRels)
|
||||
map arrNodeToFromItem (HM.elems arrRels) <>
|
||||
map computedFieldNodeToFromItem (HM.toList computedFields)
|
||||
|
||||
objNodeToFromItem :: ObjNode -> S.FromItem
|
||||
objNodeToFromItem (ObjNode relMapn relBaseNode) =
|
||||
@ -686,13 +738,28 @@ baseNodeToSel joinCond baseNode =
|
||||
als = S.Alias $ _bnPrefix bn
|
||||
in S.mkLateralFromItem sel als
|
||||
|
||||
computedFieldNodeToFromItem :: (FieldName, BaseNode) -> S.FromItem
|
||||
computedFieldNodeToFromItem (fld, bn) =
|
||||
let internalSel = baseNodeToSel (S.BELit True) bn
|
||||
als = S.Alias $ _bnPrefix bn
|
||||
extr = asJsonAggExtr False (S.toAlias fld) False Nothing $
|
||||
_bnOrderBy bn
|
||||
internalSelFrom = S.mkSelFromItem internalSel als
|
||||
sel = S.mkSelect
|
||||
{ S.selExtr = pure extr
|
||||
, S.selFrom = Just $ S.FromExp [internalSelFrom]
|
||||
}
|
||||
in S.mkLateralFromItem sel als
|
||||
|
||||
mkAggSelect :: AnnAggSel -> S.Select
|
||||
mkAggSelect annAggSel =
|
||||
prefixNumToAliases $ arrNodeToSelect bn extr $ S.BELit True
|
||||
where
|
||||
aggSel = AnnRelG rootRelName [] annAggSel
|
||||
rootIden = Iden "root"
|
||||
rootPrefix = Prefixes rootIden rootIden
|
||||
ArrNode extr _ bn =
|
||||
aggSelToArrNode (Iden "root") (FieldName "root") aggSel
|
||||
aggSelToArrNode rootPrefix (FieldName "root") aggSel
|
||||
|
||||
mkSQLSelect :: Bool -> AnnSimpleSel -> S.Select
|
||||
mkSQLSelect isSingleObject annSel =
|
||||
@ -701,7 +768,9 @@ mkSQLSelect isSingleObject annSel =
|
||||
permLimit = getPermLimit annSel
|
||||
extrs = pure $ asJsonAggExtr isSingleObject rootFldAls False permLimit
|
||||
$ _bnOrderBy baseNode
|
||||
baseNode = annSelToBaseNode False (toIden rootFldName) rootFldName annSel
|
||||
rootFldIden = toIden rootFldName
|
||||
rootPrefix = Prefixes rootFldIden rootFldIden
|
||||
baseNode = annSelToBaseNode False rootPrefix rootFldName annSel
|
||||
rootFldName = FieldName "root"
|
||||
rootFldAls = S.Alias $ toIden rootFldName
|
||||
|
||||
@ -713,7 +782,7 @@ mkFuncSelectWith f annFn =
|
||||
S.SelectWith [(funcAls, S.CTESelect funcSel)] $
|
||||
-- we'll need to modify the table from of the underlying
|
||||
-- select to the alias of the select from function
|
||||
f annSel { _asnFrom = newTabFrom }
|
||||
f annSel { _asnFrom = newSelFrom }
|
||||
where
|
||||
AnnFnSel qf fnArgs annSel = annFn
|
||||
|
||||
@ -726,7 +795,7 @@ mkFuncSelectWith f annFn =
|
||||
mkSQLFunctionArgs (FunctionArgsExp positional named) =
|
||||
S.FunctionArgs positional named
|
||||
|
||||
newTabFrom = (_asnFrom annSel) {_tfIden = Just $ toIden funcAls}
|
||||
newSelFrom = FromIden $ toIden funcAls
|
||||
|
||||
QualifiedObject sn fn = qf
|
||||
funcAls = S.Alias $ Iden $
|
||||
|
@ -8,6 +8,7 @@ import Language.Haskell.TH.Syntax (Lift)
|
||||
|
||||
import qualified Data.HashMap.Strict as HM
|
||||
import qualified Data.List.NonEmpty as NE
|
||||
import qualified Data.Sequence as Seq
|
||||
import qualified Data.Text as T
|
||||
|
||||
import Hasura.Prelude
|
||||
@ -89,11 +90,32 @@ data AnnRelG a
|
||||
|
||||
type ObjSelG v = AnnRelG (AnnSimpleSelG v)
|
||||
type ObjSel = ObjSelG S.SQLExp
|
||||
|
||||
type ArrRelG v = AnnRelG (AnnSimpleSelG v)
|
||||
type ArrRelAggG v = AnnRelG (AnnAggSelG v)
|
||||
|
||||
type ArrRelAgg = ArrRelAggG S.SQLExp
|
||||
|
||||
data ComputedFieldScalarSel v
  = ComputedFieldScalarSel
  { _cfssFunction  :: !QualifiedFunction
  , _cfssArguments :: !(FunctionArgsExpTableRow v)
  , _cfssType      :: !PGScalarType
  , _cfssColumnOp  :: !(Maybe ColOp)
  } deriving (Show, Eq, Functor, Foldable, Traversable)

data ComputedFieldSel v
  = CFSScalar !(ComputedFieldScalarSel v)
  | CFSTable !(AnnSimpleSelG v)
  deriving (Show, Eq)

traverseComputedFieldSel
  :: (Applicative f)
  => (v -> f w)
  -> ComputedFieldSel v -> f (ComputedFieldSel w)
traverseComputedFieldSel fv = \case
  CFSScalar scalarSel -> CFSScalar <$> traverse fv scalarSel
  CFSTable tableSel   -> CFSTable <$> traverseAnnSimpleSel fv tableSel
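For orientation only, a sketch of how a scalar computed field could be represented with these types. The concrete values mirror the full_name fixture in the tests later in this commit; PGText and the positional table-row argument are assumptions, not taken from this hunk.

fullNameSel :: QualifiedFunction -> ComputedFieldSel S.SQLExp
fullNameSel fullNameFunc =
  -- a scalar computed field: call the SQL function with the table row as its
  -- only argument and treat the result as a text value
  CFSScalar ComputedFieldScalarSel
    { _cfssFunction  = fullNameFunc
    , _cfssArguments = FunctionArgsExp [AETableRow] HM.empty  -- assumed positional row argument
    , _cfssType      = PGText                                 -- assumed return type
    , _cfssColumnOp  = Nothing
    }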
|
||||
|
||||
type Fields a = [(FieldName, a)]
|
||||
|
||||
data ArrSelG v
|
||||
@ -124,6 +146,7 @@ data AnnFldG v
|
||||
= FCol !PGColumnInfo !(Maybe ColOp)
|
||||
| FObj !(ObjSelG v)
|
||||
| FArr !(ArrSelG v)
|
||||
| FComputedField !(ComputedFieldSel v)
|
||||
| FExp !T.Text
|
||||
deriving (Show, Eq)
|
||||
|
||||
@ -134,6 +157,7 @@ traverseAnnFld f = \case
|
||||
FCol pgColInfo colOpM -> pure $ FCol pgColInfo colOpM
|
||||
FObj sel -> FObj <$> traverse (traverseAnnSimpleSel f) sel
|
||||
FArr sel -> FArr <$> traverseArrSel f sel
|
||||
FComputedField sel -> FComputedField <$> traverseComputedFieldSel f sel
|
||||
FExp t -> FExp <$> pure t
|
||||
|
||||
type AnnFld = AnnFldG S.SQLExp
|
||||
@ -207,11 +231,20 @@ type TableAggFld = TableAggFldG S.SQLExp
|
||||
type TableAggFldsG v = Fields (TableAggFldG v)
|
||||
type TableAggFlds = TableAggFldsG S.SQLExp
|
||||
|
||||
data TableFrom
  = TableFrom
  { _tfTable :: !QualifiedTable
  , _tfIden  :: !(Maybe Iden)
  } deriving (Show, Eq)
data ArgumentExp a
  = AETableRow
  | AEInput !a
  deriving (Show, Eq, Functor, Foldable, Traversable)

type FunctionArgsExpTableRow v = FunctionArgsExpG (ArgumentExp v)
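A small sketch, not part of the patch, of a FunctionArgsExpTableRow for the fetch_articles(search text, author_row author) fixture used in the tests below; plain String values stand in for SQL expressions.

fetchArticlesArgs :: FunctionArgsExpTableRow String
fetchArticlesArgs =
  FunctionArgsExp
    [ AEInput "'Article'"  -- the user-supplied "search" argument
    , AETableRow           -- placeholder filled with the queried author row
    ]
    HM.empty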
|
||||
|
||||
data SelectFromG v
|
||||
= FromTable !QualifiedTable
|
||||
| FromIden !Iden
|
||||
| FromFunction !QualifiedFunction !(FunctionArgsExpTableRow v)
|
||||
deriving (Show, Eq, Functor, Foldable, Traversable)
|
||||
|
||||
type SelectFrom = SelectFromG S.SQLExp
|
||||
|
||||
data TablePermG v
|
||||
= TablePerm
|
||||
@ -234,7 +267,7 @@ type TablePerm = TablePermG S.SQLExp
|
||||
data AnnSelG a v
|
||||
= AnnSelG
|
||||
{ _asnFields :: !a
|
||||
, _asnFrom :: !TableFrom
|
||||
, _asnFrom :: !(SelectFromG v)
|
||||
, _asnPerm :: !(TablePermG v)
|
||||
, _asnArgs :: !(TableArgsG v)
|
||||
, _asnStrfyNum :: !Bool
|
||||
@ -264,7 +297,7 @@ traverseAnnSel
|
||||
traverseAnnSel f1 f2 (AnnSelG flds tabFrom perm args strfyNum) =
|
||||
AnnSelG
|
||||
<$> f1 flds
|
||||
<*> pure tabFrom
|
||||
<*> traverse f2 tabFrom
|
||||
<*> traverseTablePerm f2 perm
|
||||
<*> traverseTableArgs f2 args
|
||||
<*> pure strfyNum
|
||||
@ -284,6 +317,22 @@ data FunctionArgsExpG a

emptyFunctionArgsExp :: FunctionArgsExpG a
emptyFunctionArgsExp = FunctionArgsExp [] HM.empty

-- | If the argument's positional index falls within the current 'positional'
-- list, insert the value there; otherwise insert it into the 'named' arguments
-- under its argument name.
insertFunctionArg
  :: FunctionArgName
  -> Int
  -> a
  -> FunctionArgsExpG a
  -> FunctionArgsExpG a
insertFunctionArg argName index value (FunctionArgsExp positional named) =
  if (index + 1) <= length positional then
    FunctionArgsExp (insertAt index value positional) named
  else FunctionArgsExp positional $
    HM.insert (getFuncArgNameTxt argName) value named
  where
    insertAt i a = toList . Seq.insertAt i a . Seq.fromList
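A minimal usage sketch, not part of this commit; plain String values stand in for SQL expressions, and OverloadedStrings is assumed to supply the FunctionArgName literals.

argsWithRowFirst :: FunctionArgsExpG String
argsWithRowFirst =
  -- index 0 is within the one-element positional list, so the row expression
  -- is inserted at the front: FunctionArgsExp ["author.*", "'Article'"] {}
  insertFunctionArg "author_row" 0 "author.*" $
    FunctionArgsExp ["'Article'"] HM.empty

argsWithRowNamed :: FunctionArgsExpG String
argsWithRowNamed =
  -- index 1 is past the (empty) positional list, so the row expression is
  -- recorded under its name instead: FunctionArgsExp [] {"author_row" -> "author.*"}
  insertFunctionArg "author_row" 1 "author.*" $
    FunctionArgsExp [] HM.empty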
|
||||
|
||||
type FunctionArgExp = FunctionArgsExpG S.SQLExp
|
||||
|
||||
data AnnFnSelG s v
|
||||
@ -322,17 +371,18 @@ traverseAnnFnAgg f =
|
||||
|
||||
data BaseNode
|
||||
= BaseNode
|
||||
{ _bnPrefix :: !Iden
|
||||
, _bnDistinct :: !(Maybe S.DistinctExpr)
|
||||
, _bnFrom :: !S.FromItem
|
||||
, _bnWhere :: !S.BoolExp
|
||||
, _bnOrderBy :: !(Maybe S.OrderByExp)
|
||||
, _bnLimit :: !(Maybe Int)
|
||||
, _bnOffset :: !(Maybe S.SQLExp)
|
||||
{ _bnPrefix :: !Iden
|
||||
, _bnDistinct :: !(Maybe S.DistinctExpr)
|
||||
, _bnFrom :: !S.FromItem
|
||||
, _bnWhere :: !S.BoolExp
|
||||
, _bnOrderBy :: !(Maybe S.OrderByExp)
|
||||
, _bnLimit :: !(Maybe Int)
|
||||
, _bnOffset :: !(Maybe S.SQLExp)
|
||||
|
||||
, _bnExtrs :: !(HM.HashMap S.Alias S.SQLExp)
|
||||
, _bnObjs :: !(HM.HashMap RelName ObjNode)
|
||||
, _bnArrs :: !(HM.HashMap S.Alias ArrNode)
|
||||
, _bnExtrs :: !(HM.HashMap S.Alias S.SQLExp)
|
||||
, _bnObjs :: !(HM.HashMap RelName ObjNode)
|
||||
, _bnArrs :: !(HM.HashMap S.Alias ArrNode)
|
||||
, _bnComputedFieldTables :: !(HM.HashMap FieldName BaseNode)
|
||||
} deriving (Show, Eq)
|
||||
|
||||
mergeBaseNodes :: BaseNode -> BaseNode -> BaseNode
|
||||
@ -341,10 +391,11 @@ mergeBaseNodes lNodeDet rNodeDet =
|
||||
(HM.union lExtrs rExtrs)
|
||||
(HM.unionWith mergeObjNodes lObjs rObjs)
|
||||
(HM.unionWith mergeArrNodes lArrs rArrs)
|
||||
(HM.unionWith mergeBaseNodes lCompCols rCompCols)
|
||||
where
|
||||
BaseNode pfx dExp f whr ordBy limit offset lExtrs lObjs lArrs
|
||||
BaseNode pfx dExp f whr ordBy limit offset lExtrs lObjs lArrs lCompCols
|
||||
= lNodeDet
|
||||
BaseNode _ _ _ _ _ _ _ rExtrs rObjs rArrs
|
||||
BaseNode _ _ _ _ _ _ _ rExtrs rObjs rArrs rCompCols
|
||||
= rNodeDet
|
||||
|
||||
data OrderByNode
|
||||
@ -407,3 +458,9 @@ data ArrNodeInfo
|
||||
, _aniPrefix :: !Iden
|
||||
, _aniSubQueryRequired :: !Bool
|
||||
} deriving (Show, Eq)
|
||||
|
||||
data Prefixes
|
||||
= Prefixes
|
||||
{ _pfThis :: !Iden -- Current node prefix
|
||||
, _pfBase :: !Iden -- Base table row identifier for computed field function
|
||||
} deriving (Show, Eq)
|
||||
|
@ -194,7 +194,7 @@ parseOperationsExpression rhsParser fim columnInfo =
|
||||
castOperations <- parseVal
|
||||
parsedCastOperations <-
|
||||
forM (M.toList castOperations) $ \(targetTypeName, castedComparisons) -> do
|
||||
let targetType = txtToPgColTy targetTypeName
|
||||
let targetType = textToPGScalarType targetTypeName
|
||||
castedColumn = ColumnReferenceCast column (PGColumnScalar targetType)
|
||||
checkValidCast targetType
|
||||
parsedCastedComparisons <- withPathK targetTypeName $
|
||||
@ -311,6 +311,8 @@ annColExp rhsParser colInfoMap (ColExp fieldName colVal) = do
|
||||
annRelBoolExp <- annBoolExp rhsParser relFieldInfoMap $
|
||||
unBoolExp relBoolExp
|
||||
return $ AVRel relInfo annRelBoolExp
|
||||
FIComputedField _ ->
|
||||
throw400 UnexpectedPayload "Computed columns can not be part of the where clause"
|
||||
|
||||
toSQLBoolExp
|
||||
:: S.Qual -> AnnBoolExpSQL -> S.BoolExp
|
||||
@ -326,8 +328,8 @@ convColRhs
|
||||
:: S.Qual -> AnnBoolExpFldSQL -> State Word64 S.BoolExp
|
||||
convColRhs tableQual = \case
|
||||
AVCol colInfo opExps -> do
|
||||
let cn = pgiColumn colInfo
|
||||
bExps = map (mkColCompExp tableQual cn) opExps
|
||||
let colFld = fromPGCol $ pgiColumn colInfo
|
||||
bExps = map (mkFieldCompExp tableQual colFld) opExps
|
||||
return $ foldr (S.BEBin S.AndOp) (S.BELit True) bExps
|
||||
|
||||
AVRel (RelInfo _ _ colMapping relTN _) nesAnn -> do
|
||||
@ -370,11 +372,12 @@ foldBoolExp f = \case
|
||||
BoolExists existsExp -> foldExists existsExp
|
||||
BoolFld ce -> f ce
|
||||
|
||||
mkColCompExp
|
||||
:: S.Qual -> PGCol -> OpExpG S.SQLExp -> S.BoolExp
|
||||
mkColCompExp qual lhsCol = mkCompExp (mkQCol lhsCol)
|
||||
mkFieldCompExp
|
||||
:: S.Qual -> FieldName -> OpExpG S.SQLExp -> S.BoolExp
|
||||
mkFieldCompExp qual lhsField = mkCompExp (mkQField lhsField)
|
||||
where
|
||||
mkQCol = S.SEQIden . S.QIden qual . toIden
|
||||
mkQField = S.SEQIden . S.QIden qual . Iden . getFieldNameTxt
|
||||
|
||||
mkCompExp :: S.SQLExp -> OpExpG S.SQLExp -> S.BoolExp
|
||||
mkCompExp lhs = \case
|
||||
|
@ -19,16 +19,20 @@ module Hasura.RQL.Types
|
||||
, HasQCtx(..)
|
||||
, mkAdminQCtx
|
||||
, askTabInfo
|
||||
, isTableTracked
|
||||
, askFieldInfoMap
|
||||
, askPGType
|
||||
, assertPGCol
|
||||
, askRelType
|
||||
, askFieldInfo
|
||||
, askPGColInfo
|
||||
, askComputedFieldInfo
|
||||
, askCurRole
|
||||
, askEventTriggerInfo
|
||||
, askTabInfoFromTrigger
|
||||
|
||||
, updateComputedFieldFunctionDescription
|
||||
|
||||
, adminOnly
|
||||
|
||||
, HeaderObj
|
||||
@ -37,26 +41,28 @@ module Hasura.RQL.Types
|
||||
, module R
|
||||
) where
|
||||
|
||||
import Hasura.Db as R
|
||||
import Hasura.EncJSON
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.Types.BoolExp as R
|
||||
import Hasura.RQL.Types.Column as R
|
||||
import Hasura.RQL.Types.Common as R
|
||||
import Hasura.RQL.Types.DML as R
|
||||
import Hasura.RQL.Types.Error as R
|
||||
import Hasura.RQL.Types.EventTrigger as R
|
||||
import Hasura.RQL.Types.Metadata as R
|
||||
import Hasura.RQL.Types.Permission as R
|
||||
import Hasura.RQL.Types.RemoteSchema as R
|
||||
import Hasura.RQL.Types.SchemaCache as R
|
||||
import Hasura.SQL.Types
|
||||
|
||||
import qualified Hasura.GraphQL.Context as GC
|
||||
import Hasura.Db as R
|
||||
import Hasura.RQL.Types.BoolExp as R
|
||||
import Hasura.RQL.Types.Column as R
|
||||
import Hasura.RQL.Types.Common as R
|
||||
import Hasura.RQL.Types.ComputedField as R
|
||||
import Hasura.RQL.Types.DML as R
|
||||
import Hasura.RQL.Types.Error as R
|
||||
import Hasura.RQL.Types.EventTrigger as R
|
||||
import Hasura.RQL.Types.Metadata as R
|
||||
import Hasura.RQL.Types.Permission as R
|
||||
import Hasura.RQL.Types.RemoteSchema as R
|
||||
import Hasura.RQL.Types.SchemaCache as R
|
||||
|
||||
import qualified Data.HashMap.Strict as M
|
||||
import qualified Data.Text as T
|
||||
import qualified Network.HTTP.Client as HTTP
|
||||
import qualified Hasura.GraphQL.Context as GC
|
||||
|
||||
import qualified Data.HashMap.Strict as M
|
||||
import qualified Data.Text as T
|
||||
import qualified Network.HTTP.Client as HTTP
|
||||
|
||||
getFieldInfoMap
|
||||
:: QualifiedTable
|
||||
@ -92,6 +98,10 @@ askTabInfo tabName = do
|
||||
where
|
||||
errMsg = "table " <> tabName <<> " does not exist"
|
||||
|
||||
isTableTracked :: SchemaCache -> QualifiedTable -> Bool
|
||||
isTableTracked sc qt =
|
||||
isJust $ M.lookup qt $ scTables sc
|
||||
|
||||
askTabInfoFromTrigger
|
||||
:: (QErrM m, CacheRM m)
|
||||
=> TriggerName -> m (TableInfo PGColumnInfo)
|
||||
@ -190,18 +200,52 @@ askPGColInfo
|
||||
-> T.Text
|
||||
-> m columnInfo
|
||||
askPGColInfo m c msg = do
|
||||
colInfo <- modifyErr ("column " <>) $
|
||||
fieldInfo <- modifyErr ("column " <>) $
|
||||
askFieldInfo m (fromPGCol c)
|
||||
case colInfo of
|
||||
(FIColumn pgColInfo) ->
|
||||
return pgColInfo
|
||||
_ ->
|
||||
case fieldInfo of
|
||||
(FIColumn pgColInfo) -> pure pgColInfo
|
||||
(FIRelationship _) -> throwErr "relationship"
|
||||
(FIComputedField _) -> throwErr "computed field"
|
||||
where
|
||||
throwErr fieldType =
|
||||
throwError $ err400 UnexpectedPayload $ mconcat
|
||||
[ "expecting a postgres column; but, "
|
||||
, c <<> " is a relationship; "
|
||||
, c <<> " is a " <> fieldType <> "; "
|
||||
, msg
|
||||
]
|
||||
|
||||
askComputedFieldInfo
|
||||
:: (MonadError QErr m)
|
||||
=> FieldInfoMap columnInfo
|
||||
-> ComputedFieldName
|
||||
-> m ComputedFieldInfo
|
||||
askComputedFieldInfo fields computedField = do
|
||||
fieldInfo <- modifyErr ("computed field " <>) $
|
||||
askFieldInfo fields $ fromComputedField computedField
|
||||
case fieldInfo of
|
||||
(FIColumn _) -> throwErr "column"
|
||||
(FIRelationship _) -> throwErr "relationship"
|
||||
(FIComputedField cci) -> pure cci
|
||||
where
|
||||
throwErr fieldType =
|
||||
throwError $ err400 UnexpectedPayload $ mconcat
|
||||
[ "expecting a computed field; but, "
|
||||
, computedField <<> " is a " <> fieldType <> "; "
|
||||
]
|
||||
|
||||
updateComputedFieldFunctionDescription
|
||||
:: (QErrM m, CacheRWM m)
|
||||
=> QualifiedTable -> ComputedFieldName -> Maybe PGDescription -> m ()
|
||||
updateComputedFieldFunctionDescription table computedField description = do
|
||||
fields <- _tiFieldInfoMap <$> askTabInfo table
|
||||
computedFieldInfo <- askComputedFieldInfo fields computedField
|
||||
deleteComputedFieldFromCache table computedField
|
||||
let updatedComputedFieldInfo = computedFieldInfo
|
||||
{ _cfiFunction = (_cfiFunction computedFieldInfo)
|
||||
{_cffDescription = description}
|
||||
}
|
||||
addComputedFieldToCache table updatedComputedFieldInfo
|
||||
|
||||
assertPGCol :: (MonadError QErr m)
|
||||
=> FieldInfoMap columnInfo
|
||||
-> T.Text
|
||||
|
@ -7,6 +7,7 @@ module Hasura.RQL.Types.Catalog
|
||||
, CatalogTableInfo(..)
|
||||
|
||||
, CatalogRelation(..)
|
||||
, CatalogComputedField(..)
|
||||
, CatalogPermission(..)
|
||||
, CatalogEventTrigger(..)
|
||||
, CatalogFunction(..)
|
||||
@ -18,6 +19,7 @@ import Data.Aeson
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.TH
|
||||
|
||||
import Hasura.RQL.DDL.ComputedField
|
||||
import Hasura.RQL.DDL.Schema.Function
|
||||
import Hasura.RQL.Types.Column
|
||||
import Hasura.RQL.Types.Common
|
||||
@ -68,6 +70,13 @@ data CatalogPermission
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogPermission)
|
||||
|
||||
data CatalogComputedField
|
||||
= CatalogComputedField
|
||||
{ _cccComputedField :: !AddComputedField
|
||||
, _cccFunctionInfo :: ![RawFunctionInfo] -- multiple functions with same name
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogComputedField)
|
||||
|
||||
data CatalogEventTrigger
|
||||
= CatalogEventTrigger
|
||||
{ _cetTable :: !QualifiedTable
|
||||
@ -79,7 +88,7 @@ $(deriveJSON (aesonDrop 4 snakeCase) ''CatalogEventTrigger)
|
||||
data CatalogFunction
|
||||
= CatalogFunction
|
||||
{ _cfFunction :: !QualifiedFunction
|
||||
, _cfInfo :: !(Maybe RawFuncInfo)
|
||||
, _cfInfo :: ![RawFunctionInfo] -- multiple functions with same name
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogFunction)
|
||||
|
||||
@ -93,5 +102,6 @@ data CatalogMetadata
|
||||
, _cmFunctions :: ![CatalogFunction]
|
||||
, _cmForeignKeys :: ![ForeignKey]
|
||||
, _cmAllowlistCollections :: ![CollectionDef]
|
||||
, _cmComputedFields :: ![CatalogComputedField]
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogMetadata)
|
||||
|
@ -24,6 +24,8 @@ module Hasura.RQL.Types.Common
|
||||
, rootText
|
||||
|
||||
, FunctionArgName(..)
|
||||
, FunctionArg(..)
|
||||
|
||||
, SystemDefined(..)
|
||||
, isSystemDefined
|
||||
) where
|
||||
@ -184,10 +186,18 @@ instance Hashable ForeignKey
|
||||
|
||||
newtype FunctionArgName =
|
||||
FunctionArgName { getFuncArgNameTxt :: T.Text}
|
||||
deriving (Show, Eq, ToJSON)
|
||||
deriving (Show, Eq, ToJSON, FromJSON, Lift, DQuote, IsString)
|
||||
|
||||
type CustomColumnNames = HM.HashMap PGCol G.Name
|
||||
|
||||
data FunctionArg
|
||||
= FunctionArg
|
||||
{ faName :: !(Maybe FunctionArgName)
|
||||
, faType :: !QualifiedPGType
|
||||
, faHasDefault :: !Bool
|
||||
} deriving (Show, Eq)
|
||||
$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionArg)
|
||||
|
||||
newtype SystemDefined = SystemDefined { unSystemDefined :: Bool }
|
||||
deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg)
|
||||
|
||||
|
server/src-lib/Hasura/RQL/Types/ComputedField.hs (new file, 69 lines)
@ -0,0 +1,69 @@
{- |
Description: Schema cache types related to computed field
-}

module Hasura.RQL.Types.ComputedField where

import           Hasura.Prelude
import           Hasura.RQL.Types.Common
import           Hasura.SQL.Types

import           Data.Aeson
import           Data.Aeson.Casing
import           Data.Aeson.TH
import           Instances.TH.Lift          ()
import           Language.Haskell.TH.Syntax (Lift)

import qualified Data.Sequence              as Seq
import qualified Database.PG.Query          as Q

newtype ComputedFieldName =
  ComputedFieldName { unComputedFieldName :: NonEmptyText }
  deriving (Show, Eq, Lift, FromJSON, ToJSON, Q.ToPrepArg, DQuote, Hashable)

computedFieldNameToText :: ComputedFieldName -> Text
computedFieldNameToText = unNonEmptyText . unComputedFieldName

fromComputedField :: ComputedFieldName -> FieldName
fromComputedField = FieldName . computedFieldNameToText
-- | The function's table argument is either the very first argument or a named
-- argument identified by its name and index. The index is 0 when the named
-- argument is also the first one.
data FunctionTableArgument
  = FTAFirst
  | FTANamed
      !FunctionArgName -- ^ argument name
      !Int             -- ^ argument index
  deriving (Show, Eq)

instance ToJSON FunctionTableArgument where
  toJSON FTAFirst             = String "first_argument"
  toJSON (FTANamed argName _) = object ["name" .= argName]
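For illustration only, a hypothetical helper, not defined anywhere in this commit, showing how a table argument named in the metadata could be resolved against a function's input arguments:

resolveTableArgument :: Maybe FunctionArgName -> [FunctionArg] -> Maybe FunctionTableArgument
resolveTableArgument Nothing  _    = Just FTAFirst   -- no name given: the first argument is the row
resolveTableArgument (Just n) args = go 0 args
  where
    go _ []     = Nothing                            -- no input argument with that name
    go i (a:as)
      | faName a == Just n = Just $ FTANamed n i     -- index 0 means it is also the first argument
      | otherwise          = go (i + 1) as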
|
||||
|
||||
data ComputedFieldReturn
  = CFRScalar !PGScalarType
  | CFRSetofTable !QualifiedTable
  deriving (Show, Eq)
$(deriveToJSON defaultOptions { constructorTagModifier = snakeCase . drop 3
                              , sumEncoding = TaggedObject "type" "info"
                              }
  ''ComputedFieldReturn
 )

data ComputedFieldFunction
  = ComputedFieldFunction
  { _cffName          :: !QualifiedFunction
  , _cffInputArgs     :: !(Seq.Seq FunctionArg)
  , _cffTableArgument :: !FunctionTableArgument
  , _cffDescription   :: !(Maybe PGDescription)
  } deriving (Show, Eq)
$(deriveToJSON (aesonDrop 4 snakeCase) ''ComputedFieldFunction)

data ComputedFieldInfo
  = ComputedFieldInfo
  { _cfiName       :: !ComputedFieldName
  , _cfiFunction   :: !ComputedFieldFunction
  , _cfiReturnType :: !ComputedFieldReturn
  , _cfiComment    :: !(Maybe Text)
  } deriving (Show, Eq)
$(deriveToJSON (aesonDrop 4 snakeCase) ''ComputedFieldInfo)
|
@ -3,9 +3,10 @@ module Hasura.RQL.Types.Metadata where
|
||||
import Data.Aeson
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Data.Text as T
|
||||
import qualified Data.Text as T
|
||||
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.ComputedField
|
||||
import Hasura.RQL.Types.EventTrigger
|
||||
import Hasura.RQL.Types.Permission
|
||||
import Hasura.RQL.Types.RemoteSchema
|
||||
@ -18,16 +19,18 @@ data MetadataObjType
|
||||
| MOTEventTrigger
|
||||
| MOTFunction
|
||||
| MOTRemoteSchema
|
||||
| MOTComputedField
|
||||
deriving (Eq, Generic)
|
||||
instance Hashable MetadataObjType
|
||||
|
||||
instance Show MetadataObjType where
|
||||
show MOTTable = "table"
|
||||
show (MOTRel ty) = T.unpack (relTypeToTxt ty) <> "_relation"
|
||||
show (MOTPerm ty) = show ty <> "_permission"
|
||||
show MOTEventTrigger = "event_trigger"
|
||||
show MOTFunction = "function"
|
||||
show MOTRemoteSchema = "remote_schema"
|
||||
show MOTTable = "table"
|
||||
show (MOTRel ty) = T.unpack (relTypeToTxt ty) <> "_relation"
|
||||
show (MOTPerm ty) = show ty <> "_permission"
|
||||
show MOTEventTrigger = "event_trigger"
|
||||
show MOTFunction = "function"
|
||||
show MOTRemoteSchema = "remote_schema"
|
||||
show MOTComputedField = "computed_field"
|
||||
|
||||
instance ToJSON MetadataObjType where
|
||||
toJSON = String . T.pack . show
|
||||
@ -36,6 +39,7 @@ data TableMetadataObjId
|
||||
= MTORel !RelName !RelType
|
||||
| MTOPerm !RoleName !PermType
|
||||
| MTOTrigger !TriggerName
|
||||
| MTOComputedField !ComputedFieldName
|
||||
deriving (Show, Eq, Generic)
|
||||
instance Hashable TableMetadataObjId
|
||||
|
||||
|
@ -52,20 +52,20 @@ module Hasura.RQL.Types.SchemaCache
|
||||
, FieldInfo(..)
|
||||
, _FIColumn
|
||||
, _FIRelationship
|
||||
, fieldInfoToEither
|
||||
, partitionFieldInfos
|
||||
, partitionFieldInfosWith
|
||||
, getCols
|
||||
, getRels
|
||||
, getComputedFieldInfos
|
||||
|
||||
, isPGColInfo
|
||||
, RelInfo(..)
|
||||
, addColToCache
|
||||
, addRelToCache
|
||||
, addComputedFieldToCache
|
||||
|
||||
, delColFromCache
|
||||
, updColInCache
|
||||
, delRelFromCache
|
||||
, deleteComputedFieldFromCache
|
||||
|
||||
, RolePermInfo(..)
|
||||
, permIns
|
||||
@ -99,6 +99,7 @@ module Hasura.RQL.Types.SchemaCache
|
||||
, SchemaDependency(..)
|
||||
, mkParentDep
|
||||
, mkColDep
|
||||
, mkComputedFieldDep
|
||||
, getDependentObjs
|
||||
, getDependentObjsWith
|
||||
|
||||
@ -123,6 +124,7 @@ import Hasura.Prelude
|
||||
import Hasura.RQL.Types.BoolExp
|
||||
import Hasura.RQL.Types.Column
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.ComputedField
|
||||
import Hasura.RQL.Types.Error
|
||||
import Hasura.RQL.Types.EventTrigger
|
||||
import Hasura.RQL.Types.Metadata
|
||||
@ -153,11 +155,17 @@ mkColDep :: DependencyReason -> QualifiedTable -> PGCol -> SchemaDependency
|
||||
mkColDep reason tn col =
|
||||
flip SchemaDependency reason . SOTableObj tn $ TOCol col
|
||||
|
||||
mkComputedFieldDep
|
||||
:: DependencyReason -> QualifiedTable -> ComputedFieldName -> SchemaDependency
|
||||
mkComputedFieldDep reason tn computedField =
|
||||
flip SchemaDependency reason . SOTableObj tn $ TOComputedField computedField
|
||||
|
||||
type WithDeps a = (a, [SchemaDependency])
|
||||
|
||||
data FieldInfo columnInfo
|
||||
= FIColumn !columnInfo
|
||||
| FIRelationship !RelInfo
|
||||
| FIComputedField !ComputedFieldInfo
|
||||
deriving (Show, Eq)
|
||||
$(deriveToJSON
|
||||
defaultOptions { constructorTagModifier = snakeCase . drop 2
|
||||
@ -166,27 +174,16 @@ $(deriveToJSON
|
||||
''FieldInfo)
|
||||
$(makePrisms ''FieldInfo)
|
||||
|
||||
fieldInfoToEither :: FieldInfo columnInfo -> Either columnInfo RelInfo
|
||||
fieldInfoToEither (FIColumn l) = Left l
|
||||
fieldInfoToEither (FIRelationship r) = Right r
|
||||
|
||||
partitionFieldInfos :: [FieldInfo columnInfo] -> ([columnInfo], [RelInfo])
|
||||
partitionFieldInfos = partitionFieldInfosWith (id, id)
|
||||
|
||||
partitionFieldInfosWith :: (columnInfo -> a, RelInfo -> b)
|
||||
-> [FieldInfo columnInfo] -> ([a], [b])
|
||||
partitionFieldInfosWith fns =
|
||||
partitionEithers . map (biMapEither fns . fieldInfoToEither)
|
||||
where
|
||||
biMapEither (f1, f2) = either (Left . f1) (Right . f2)
|
||||
|
||||
type FieldInfoMap columnInfo = M.HashMap FieldName (FieldInfo columnInfo)
|
||||
|
||||
getCols :: FieldInfoMap columnInfo -> [columnInfo]
|
||||
getCols fim = lefts $ map fieldInfoToEither $ M.elems fim
|
||||
getCols = mapMaybe (^? _FIColumn) . M.elems
|
||||
|
||||
getRels :: FieldInfoMap columnInfo -> [RelInfo]
|
||||
getRels fim = rights $ map fieldInfoToEither $ M.elems fim
|
||||
getRels = mapMaybe (^? _FIRelationship) . M.elems
|
||||
|
||||
getComputedFieldInfos :: FieldInfoMap columnInfo -> [ComputedFieldInfo]
|
||||
getComputedFieldInfos = mapMaybe (^? _FIComputedField) . M.elems
|
||||
|
||||
isPGColInfo :: FieldInfo columnInfo -> Bool
|
||||
isPGColInfo (FIColumn _) = True
|
||||
@ -206,6 +203,7 @@ $(deriveToJSON (aesonDrop 3 snakeCase) ''InsPermInfo)
|
||||
data SelPermInfo
|
||||
= SelPermInfo
|
||||
{ spiCols :: !(HS.HashSet PGCol)
|
||||
, spiComputedFields :: !(HS.HashSet ComputedFieldName)
|
||||
, spiTable :: !QualifiedTable
|
||||
, spiFilter :: !AnnBoolExpPartialSQL
|
||||
, spiLimit :: !(Maybe Int)
|
||||
@ -363,7 +361,7 @@ data TableInfo columnInfo
|
||||
, _tiEnumValues :: !(Maybe EnumValues)
|
||||
, _tiCustomConfig :: !TableConfig
|
||||
} deriving (Show, Eq)
|
||||
$(deriveToJSON (aesonDrop 2 snakeCase) ''TableInfo)
|
||||
$(deriveToJSON (aesonDrop 3 snakeCase) ''TableInfo)
|
||||
$(makeLenses ''TableInfo)
|
||||
|
||||
checkForFieldConflict
|
||||
@ -396,15 +394,6 @@ funcTypToTxt FTSTABLE = "STABLE"
|
||||
instance Show FunctionType where
|
||||
show = T.unpack . funcTypToTxt
|
||||
|
||||
data FunctionArg
|
||||
= FunctionArg
|
||||
{ faName :: !(Maybe FunctionArgName)
|
||||
, faType :: !PGScalarType
|
||||
, faHasDefault :: !Bool
|
||||
} deriving (Show, Eq)
|
||||
|
||||
$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionArg)
|
||||
|
||||
data FunctionInfo
|
||||
= FunctionInfo
|
||||
{ fiName :: !QualifiedFunction
|
||||
@ -565,6 +554,14 @@ addRelToCache rn ri deps tn = do
|
||||
where
|
||||
schObjId = SOTableObj tn $ TORel $ riName ri
|
||||
|
||||
addComputedFieldToCache
|
||||
:: (QErrM m, CacheRWM m)
|
||||
=> QualifiedTable -> ComputedFieldInfo -> m ()
|
||||
addComputedFieldToCache table computedFieldInfo =
|
||||
addFldToCache computedField (FIComputedField computedFieldInfo) table
|
||||
where
|
||||
computedField = fromComputedField $ _cfiName computedFieldInfo
|
||||
|
||||
addFldToCache
|
||||
:: (QErrM m, CacheRWM m)
|
||||
=> FieldName -> FieldInfo PGColumnInfo
|
||||
@ -604,6 +601,12 @@ delRelFromCache rn tn = do
|
||||
where
|
||||
schObjId = SOTableObj tn $ TORel rn
|
||||
|
||||
deleteComputedFieldFromCache
|
||||
:: (QErrM m, CacheRWM m)
|
||||
=> QualifiedTable -> ComputedFieldName -> m ()
|
||||
deleteComputedFieldFromCache table computedField =
|
||||
delFldFromCache (fromComputedField computedField) table
|
||||
|
||||
updColInCache
|
||||
:: (QErrM m, CacheRWM m)
|
||||
=> PGCol -> PGColumnInfo
|
||||
|
@ -6,9 +6,10 @@ import Data.Aeson.TH
|
||||
import Data.Aeson.Types
|
||||
import Hasura.Prelude
|
||||
|
||||
import qualified Data.Text as T
|
||||
import qualified Data.Text as T
|
||||
|
||||
import Hasura.RQL.Types.Common
|
||||
import Hasura.RQL.Types.ComputedField
|
||||
import Hasura.RQL.Types.EventTrigger
|
||||
import Hasura.RQL.Types.Permission
|
||||
import Hasura.SQL.Types
|
||||
@ -19,6 +20,7 @@ data TableObjId
|
||||
| TOCons !ConstraintName
|
||||
| TOPerm !RoleName !PermType
|
||||
| TOTrigger !TriggerName
|
||||
| TOComputedField !ComputedFieldName
|
||||
deriving (Show, Eq, Generic)
|
||||
|
||||
instance Hashable TableObjId
|
||||
@ -45,6 +47,8 @@ reportSchemaObj (SOTableObj tn (TOPerm rn pt)) =
|
||||
<> "." <> permTypeToCode pt
|
||||
reportSchemaObj (SOTableObj tn (TOTrigger trn )) =
|
||||
"event-trigger " <> qualObjectToText tn <> "." <> triggerNameToTxt trn
|
||||
reportSchemaObj (SOTableObj tn (TOComputedField ccn)) =
|
||||
"computed field " <> qualObjectToText tn <> "." <> computedFieldNameToText ccn
|
||||
|
||||
instance Show SchemaObjId where
|
||||
show soi = T.unpack $ reportSchemaObj soi
|
||||
|
@ -121,7 +121,7 @@ mkSelFromExp isLateral sel tn =
|
||||
alias = Alias $ toIden tn
|
||||
|
||||
mkFuncFromItem :: QualifiedFunction -> FunctionArgs -> FromItem
|
||||
mkFuncFromItem qf args = FIFunc qf args Nothing
|
||||
mkFuncFromItem qf args = FIFunc $ FunctionExp qf args Nothing
|
||||
|
||||
mkRowExp :: [Extractor] -> SQLExp
|
||||
mkRowExp extrs = let
|
||||
@ -284,6 +284,7 @@ data SQLExp
|
||||
| SETuple !TupleExp
|
||||
| SECount !CountType
|
||||
| SENamedArg !Iden !SQLExp
|
||||
| SEFunction !FunctionExp
|
||||
deriving (Show, Eq, Data)
|
||||
|
||||
withTyAnn :: PGScalarType -> SQLExp -> SQLExp
|
||||
@ -348,6 +349,7 @@ instance ToSQL SQLExp where
|
||||
toSQL (SECount ty) = "COUNT" <> paren (toSQL ty)
|
||||
-- https://www.postgresql.org/docs/current/sql-syntax-calling-funcs.html
|
||||
toSQL (SENamedArg arg val) = toSQL arg <-> "=>" <-> toSQL val
|
||||
toSQL (SEFunction funcExp) = toSQL funcExp
|
||||
|
||||
intToSQLExp :: Int -> SQLExp
|
||||
intToSQLExp =
|
||||
@ -418,10 +420,21 @@ instance ToSQL FunctionArgs where
|
||||
\(argName, argVal) -> SENamedArg (Iden argName) argVal
|
||||
in paren $ ", " <+> (positionalArgs <> namedArgs)
|
||||
|
||||
data FunctionExp
|
||||
= FunctionExp
|
||||
{ feName :: !QualifiedFunction
|
||||
, feArgs :: !FunctionArgs
|
||||
, feAlias :: !(Maybe Alias)
|
||||
} deriving (Show, Eq, Data)
|
||||
|
||||
instance ToSQL FunctionExp where
|
||||
toSQL (FunctionExp qf args alsM) =
|
||||
toSQL qf <> toSQL args <-> toSQL alsM
|
||||
|
||||
data FromItem
|
||||
= FISimple !QualifiedTable !(Maybe Alias)
|
||||
| FIIden !Iden
|
||||
| FIFunc !QualifiedFunction !FunctionArgs !(Maybe Alias)
|
||||
| FIFunc !FunctionExp
|
||||
| FIUnnest ![SQLExp] !Alias ![SQLExp]
|
||||
| FISelect !Lateral !Select !Alias
|
||||
| FIValues !ValuesExp !Alias !(Maybe [PGCol])
|
||||
@ -443,8 +456,7 @@ instance ToSQL FromItem where
|
||||
toSQL qt <-> toSQL mal
|
||||
toSQL (FIIden iden) =
|
||||
toSQL iden
|
||||
toSQL (FIFunc qf args mal) =
|
||||
toSQL qf <> toSQL args <-> toSQL mal
|
||||
toSQL (FIFunc funcExp) = toSQL funcExp
|
||||
-- unnest(expressions) alias(columns)
|
||||
toSQL (FIUnnest args als cols) =
|
||||
"UNNEST" <> paren (", " <+> args) <-> toSQL als <> paren (", " <+> cols)
|
||||
|
@ -81,14 +81,22 @@ uFromExp :: S.FromExp -> Uniq S.FromExp
|
||||
uFromExp (S.FromExp fromItems) =
|
||||
S.FromExp <$> mapM uFromItem fromItems
|
||||
|
||||
uFunctionArgs :: S.FunctionArgs -> Uniq S.FunctionArgs
|
||||
uFunctionArgs (S.FunctionArgs positional named) =
|
||||
S.FunctionArgs <$> mapM uSqlExp positional <*> mapM uSqlExp named
|
||||
|
||||
uFunctionExp :: S.FunctionExp -> Uniq S.FunctionExp
|
||||
uFunctionExp (S.FunctionExp qf args alM) =
|
||||
S.FunctionExp qf <$> uFunctionArgs args <*> mapM addAlias alM
|
||||
|
||||
uFromItem :: S.FromItem -> Uniq S.FromItem
|
||||
uFromItem fromItem = case fromItem of
|
||||
S.FISimple t alM ->
|
||||
S.FISimple t <$> mapM addAlias alM
|
||||
S.FIIden iden ->
|
||||
S.FIIden <$> return iden
|
||||
S.FIFunc f args alM ->
|
||||
S.FIFunc f args <$> mapM addAlias alM
|
||||
S.FIFunc funcExp ->
|
||||
S.FIFunc <$> uFunctionExp funcExp
|
||||
S.FIUnnest args als cols ->
|
||||
S.FIUnnest <$> mapM uSqlExp args <*> addAlias als <*> mapM uSqlExp cols
|
||||
S.FISelect isLateral sel al -> do
|
||||
@ -185,6 +193,7 @@ uSqlExp = restoringIdens . \case
|
||||
S.SEArray <$> mapM uSqlExp l
|
||||
S.SECount cty -> return $ S.SECount cty
|
||||
S.SENamedArg arg val -> S.SENamedArg arg <$> uSqlExp val
|
||||
S.SEFunction funcExp -> S.SEFunction <$> uFunctionExp funcExp
|
||||
where
|
||||
uQual = \case
|
||||
S.QualIden iden -> S.QualIden <$> getIden iden
|
||||
|
@ -53,8 +53,13 @@ module Hasura.SQL.Types
|
||||
, PGScalarType(..)
|
||||
, WithScalarType(..)
|
||||
, PGType(..)
|
||||
, txtToPgColTy
|
||||
, textToPGScalarType
|
||||
, pgTypeOid
|
||||
|
||||
, PGTypeKind(..)
|
||||
, QualifiedPGType(..)
|
||||
, isBaseType
|
||||
, typeToTable
|
||||
)
|
||||
where
|
||||
|
||||
@ -64,6 +69,7 @@ import qualified Database.PG.Query.PTI as PTI
|
||||
import Hasura.Prelude
|
||||
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.Encoding (text)
|
||||
import Data.Aeson.TH
|
||||
import Data.Aeson.Types (toJSONKeyText)
|
||||
@ -368,8 +374,8 @@ instance ToJSONKey PGScalarType where
|
||||
instance DQuote PGScalarType where
|
||||
dquoteTxt = toSQLTxt
|
||||
|
||||
txtToPgColTy :: Text -> PGScalarType
|
||||
txtToPgColTy t = case t of
|
||||
textToPGScalarType :: Text -> PGScalarType
|
||||
textToPGScalarType t = case t of
|
||||
"serial" -> PGSerial
|
||||
"bigserial" -> PGBigSerial
|
||||
|
||||
@ -421,7 +427,7 @@ txtToPgColTy t = case t of
|
||||
|
||||
|
||||
instance FromJSON PGScalarType where
|
||||
parseJSON (String t) = return $ txtToPgColTy t
|
||||
parseJSON (String t) = return $ textToPGScalarType t
|
||||
parseJSON _ = fail "Expecting a string for PGScalarType"
|
||||
|
||||
pgTypeOid :: PGScalarType -> PQ.Oid
|
||||
@ -520,3 +526,54 @@ instance (ToSQL a) => ToSQL (PGType a) where
|
||||
PGTypeScalar ty -> toSQL ty
|
||||
-- typename array is an sql standard way of declaring types
|
||||
PGTypeArray ty -> toSQL ty <> " array"
|
||||
|
||||
data PGTypeKind
  = PGKindBase
  | PGKindComposite
  | PGKindDomain
  | PGKindEnum
  | PGKindRange
  | PGKindPseudo
  | PGKindUnknown !T.Text
  deriving (Show, Eq)

instance FromJSON PGTypeKind where
  parseJSON = withText "postgresTypeKind" $
    \t -> pure $ case t of
      "b" -> PGKindBase
      "c" -> PGKindComposite
      "d" -> PGKindDomain
      "e" -> PGKindEnum
      "r" -> PGKindRange
      "p" -> PGKindPseudo
      _   -> PGKindUnknown t

instance ToJSON PGTypeKind where
  toJSON = \case
    PGKindBase      -> "b"
    PGKindComposite -> "c"
    PGKindDomain    -> "d"
    PGKindEnum      -> "e"
    PGKindRange     -> "r"
    PGKindPseudo    -> "p"
    PGKindUnknown t -> String t
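These instances consume the raw pg_type.typtype letter that the catalog SQL in this diff now surfaces as return_type_type. A sketch, not in the commit and assuming OverloadedStrings for the literals, of the aeson round-trip:

pgTypeKindRoundTrips :: Bool
pgTypeKindRoundTrips =
  decode "\"c\"" == Just PGKindComposite          -- 'c' = composite, e.g. a table row type
    && decode "\"x\"" == Just (PGKindUnknown "x") -- unrecognised codes are preserved
    && encode PGKindBase == "\"b\""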
|
||||
|
||||
data QualifiedPGType
|
||||
= QualifiedPGType
|
||||
{ _qptSchema :: !SchemaName
|
||||
, _qptName :: !PGScalarType
|
||||
, _qptType :: !PGTypeKind
|
||||
} deriving (Show, Eq)
|
||||
$(deriveJSON (aesonDrop 4 snakeCase) ''QualifiedPGType)
|
||||
|
||||
isBaseType :: QualifiedPGType -> Bool
|
||||
isBaseType (QualifiedPGType _ n ty) =
|
||||
notUnknown && (ty == PGKindBase)
|
||||
where
|
||||
notUnknown = case n of
|
||||
PGUnknown _ -> False
|
||||
_ -> True
|
||||
|
||||
typeToTable :: QualifiedPGType -> QualifiedTable
|
||||
typeToTable (QualifiedPGType sch n _) =
|
||||
QualifiedObject sch $ TableName $ toSQLTxt n
|
||||
|
@ -1,5 +1,6 @@
|
||||
module Hasura.Server.Query where
|
||||
|
||||
import Control.Lens
|
||||
import Data.Aeson
|
||||
import Data.Aeson.Casing
|
||||
import Data.Aeson.TH
|
||||
@ -11,6 +12,7 @@ import qualified Network.HTTP.Client as HTTP
|
||||
|
||||
import Hasura.EncJSON
|
||||
import Hasura.Prelude
|
||||
import Hasura.RQL.DDL.ComputedField
|
||||
import Hasura.RQL.DDL.EventTrigger
|
||||
import Hasura.RQL.DDL.Metadata
|
||||
import Hasura.RQL.DDL.Permission
|
||||
@ -45,6 +47,11 @@ data RQLQueryV1
|
||||
| RQSetRelationshipComment !SetRelComment
|
||||
| RQRenameRelationship !RenameRel
|
||||
|
||||
-- computed fields related
|
||||
|
||||
| RQAddComputedField !AddComputedField
|
||||
| RQDropComputedField !DropComputedField
|
||||
|
||||
| RQCreateInsertPermission !CreateInsPerm
|
||||
| RQCreateSelectPermission !CreateSelPerm
|
||||
| RQCreateUpdatePermission !CreateUpdPerm
|
||||
@ -230,6 +237,9 @@ queryNeedsReload (RQV1 qi) = case qi of
|
||||
RQSetRelationshipComment _ -> False
|
||||
RQRenameRelationship _ -> True
|
||||
|
||||
RQAddComputedField _ -> True
|
||||
RQDropComputedField _ -> True
|
||||
|
||||
RQCreateInsertPermission _ -> True
|
||||
RQCreateSelectPermission _ -> True
|
||||
RQCreateUpdatePermission _ -> True
|
||||
@ -311,6 +321,9 @@ runQueryM rq =
|
||||
RQSetRelationshipComment q -> runSetRelComment q
|
||||
RQRenameRelationship q -> runRenameRel q
|
||||
|
||||
RQAddComputedField q -> runAddComputedField q
|
||||
RQDropComputedField q -> runDropComputedField q
|
||||
|
||||
RQCreateInsertPermission q -> runCreatePerm q
|
||||
RQCreateSelectPermission q -> runCreatePerm q
|
||||
RQCreateUpdatePermission q -> runCreatePerm q
|
||||
|
@ -132,7 +132,7 @@ computeMetrics sc =
|
||||
let nTables = countUserTables (isNothing . _tiViewInfo)
|
||||
nViews = countUserTables (isJust . _tiViewInfo)
|
||||
nEnumTables = countUserTables (isJust . _tiEnumValues)
|
||||
allRels = join $ Map.elems $ Map.map relsOfTbl userTables
|
||||
allRels = join $ Map.elems $ Map.map (getRels . _tiFieldInfoMap) userTables
|
||||
(manualRels, autoRels) = partition riIsManual allRels
|
||||
relMetrics = RelationshipMetric (length manualRels) (length autoRels)
|
||||
rolePerms = join $ Map.elems $ Map.map permsOfTbl userTables
|
||||
@ -158,9 +158,6 @@ computeMetrics sc =
|
||||
calcPerms :: (RolePermInfo -> Maybe a) -> [RolePermInfo] -> Int
|
||||
calcPerms fn perms = length $ catMaybes $ map fn perms
|
||||
|
||||
relsOfTbl :: TableInfo PGColumnInfo -> [RelInfo]
|
||||
relsOfTbl = rights . Map.elems . Map.map fieldInfoToEither . _tiFieldInfoMap
|
||||
|
||||
permsOfTbl :: TableInfo PGColumnInfo -> [(RoleName, RolePermInfo)]
|
||||
permsOfTbl = Map.toList . _tiRolePermInfoMap
|
||||
|
||||
|
@ -98,15 +98,6 @@ duplicates = mapMaybe greaterThanOne . group . sort
|
||||
where
|
||||
greaterThanOne l = bool Nothing (Just $ head l) $ length l > 1
|
||||
|
||||
_1 :: (a, b, c) -> a
|
||||
_1 (x, _, _) = x
|
||||
|
||||
_2 :: (a, b, c) -> b
|
||||
_2 (_, y, _) = y
|
||||
|
||||
_3 :: (a, b, c) -> c
|
||||
_3 (_, _, z) = z
|
||||
|
||||
-- regex related
|
||||
matchRegex :: B.ByteString -> Bool -> T.Text -> Either String Bool
|
||||
matchRegex regex caseSensitive src =
|
||||
|
@ -7,7 +7,8 @@ select
|
||||
'remote_schemas', remote_schemas.items,
|
||||
'functions', functions.items,
|
||||
'foreign_keys', foreign_keys.items,
|
||||
'allowlist_collections', allowlist.item
|
||||
'allowlist_collections', allowlist.item,
|
||||
'computed_fields', computed_field.items
|
||||
)
|
||||
from
|
||||
(
|
||||
@ -128,15 +129,17 @@ from
|
||||
'schema', hf.function_schema,
|
||||
'name', hf.function_name
|
||||
),
|
||||
'info', function_info
|
||||
'info', hf_agg.function_info
|
||||
) as info
|
||||
from
|
||||
hdb_catalog.hdb_function hf
|
||||
left outer join
|
||||
hdb_catalog.hdb_function_info_agg hf_agg on
|
||||
( hf_agg.function_name = hf.function_name
|
||||
and hf_agg.function_schema = hf.function_schema
|
||||
)
|
||||
left join lateral
|
||||
(
|
||||
select coalesce(json_agg(function_info), '[]') as function_info
|
||||
from hdb_catalog.hdb_function_info_agg
|
||||
where function_name = hf.function_name
|
||||
and function_schema = hf.function_schema
|
||||
) hf_agg on 'true'
|
||||
) as q
|
||||
) as functions,
|
||||
(
|
||||
@ -175,4 +178,36 @@ from
|
||||
left outer join
|
||||
hdb_catalog.hdb_query_collection hqc
|
||||
on (hqc.collection_name = ha.collection_name)
|
||||
) as allowlist
|
||||
) as allowlist,
|
||||
(
|
||||
select
|
||||
coalesce(json_agg(
|
||||
json_build_object('computed_field', cc.computed_field,
|
||||
'function_info', fi.function_info
|
||||
)
|
||||
), '[]') as items
|
||||
from
|
||||
(
|
||||
select json_build_object(
|
||||
'table', jsonb_build_object('name', hcc.table_name,'schema', hcc.table_schema),
|
||||
'name', hcc.computed_field_name,
|
||||
'definition', hcc.definition,
|
||||
'comment', hcc.comment
|
||||
) as computed_field,
|
||||
hccf.function_name,
|
||||
hccf.function_schema
|
||||
from hdb_catalog.hdb_computed_field hcc
|
||||
left outer join
|
||||
hdb_catalog.hdb_computed_field_function hccf
|
||||
on ( hcc.table_name = hccf.table_name
|
||||
and hcc.table_schema = hccf.table_schema
|
||||
and hcc.computed_field_name = hccf.computed_field_name
|
||||
)
|
||||
) cc
|
||||
left join lateral
|
||||
(
|
||||
select coalesce(json_agg(function_info), '[]') as function_info
|
||||
from hdb_catalog.hdb_function_info_agg
|
||||
where function_name = cc.function_name and function_schema = cc.function_schema
|
||||
) fi on 'true'
|
||||
) as computed_field
|
||||
|
@ -336,36 +336,36 @@ SELECT
|
||||
|
||||
pg_get_functiondef(p.oid) AS function_definition,
|
||||
|
||||
rtn.nspname::text AS return_type_schema,
|
||||
rt.typname::text AS return_type_name,
|
||||
|
||||
CASE
|
||||
WHEN ((rt.typtype) :: text = ('b' :: character(1)) :: text) THEN 'BASE' :: text
|
||||
WHEN ((rt.typtype) :: text = ('c' :: character(1)) :: text) THEN 'COMPOSITE' :: text
|
||||
WHEN ((rt.typtype) :: text = ('d' :: character(1)) :: text) THEN 'DOMAIN' :: text
|
||||
WHEN ((rt.typtype) :: text = ('e' :: character(1)) :: text) THEN 'ENUM' :: text
|
||||
WHEN ((rt.typtype) :: text = ('r' :: character(1)) :: text) THEN 'RANGE' :: text
|
||||
WHEN ((rt.typtype) :: text = ('p' :: character(1)) :: text) THEN 'PSEUDO' :: text
|
||||
ELSE NULL :: text
|
||||
END AS return_type_type,
|
||||
rtn.nspname::text as return_type_schema,
|
||||
rt.typname::text as return_type_name,
|
||||
rt.typtype::text as return_type_type,
|
||||
p.proretset AS returns_set,
|
||||
( SELECT
|
||||
COALESCE(json_agg(q.type_name), '[]')
|
||||
COALESCE(json_agg(
|
||||
json_build_object('schema', q."schema",
|
||||
'name', q."name",
|
||||
'type', q."type"
|
||||
)
|
||||
), '[]')
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
pt.typname AS type_name,
|
||||
pt.typname AS "name",
|
||||
pns.nspname AS "schema",
|
||||
pt.typtype AS "type",
|
||||
pat.ordinality
|
||||
FROM
|
||||
unnest(
|
||||
COALESCE(p.proallargtypes, (p.proargtypes) :: oid [])
|
||||
) WITH ORDINALITY pat(oid, ordinality)
|
||||
LEFT JOIN pg_type pt ON ((pt.oid = pat.oid))
|
||||
LEFT JOIN pg_namespace pns ON (pt.typnamespace = pns.oid)
|
||||
ORDER BY pat.ordinality ASC
|
||||
) q
|
||||
) AS input_arg_types,
|
||||
to_json(COALESCE(p.proargnames, ARRAY [] :: text [])) AS input_arg_names,
|
||||
p.pronargdefaults AS default_args
|
||||
p.pronargdefaults AS default_args,
|
||||
p.oid::integer AS function_oid
|
||||
FROM
|
||||
pg_proc p
|
||||
JOIN pg_namespace pn ON (pn.oid = p.pronamespace)
|
||||
@ -630,3 +630,32 @@ CREATE TABLE hdb_catalog.hdb_allowlist
|
||||
collection_name TEXT UNIQUE
|
||||
REFERENCES hdb_catalog.hdb_query_collection(collection_name)
|
||||
);
|
||||
|
||||
CREATE TABLE hdb_catalog.hdb_computed_field
|
||||
(
|
||||
table_schema TEXT,
|
||||
table_name TEXT,
|
||||
computed_field_name TEXT,
|
||||
definition JSONB NOT NULL,
|
||||
comment TEXT NULL,
|
||||
|
||||
PRIMARY KEY (table_schema, table_name, computed_field_name),
|
||||
FOREIGN KEY (table_schema, table_name) REFERENCES hdb_catalog.hdb_table(table_schema, table_name) ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_computed_field_function AS
|
||||
(
|
||||
SELECT
|
||||
table_schema,
|
||||
table_name,
|
||||
computed_field_name,
|
||||
CASE
|
||||
WHEN (definition::jsonb -> 'function')::jsonb ->> 'name' IS NULL THEN definition::jsonb ->> 'function'
|
||||
ELSE (definition::jsonb -> 'function')::jsonb ->> 'name'
|
||||
END AS function_name,
|
||||
CASE
|
||||
WHEN (definition::jsonb -> 'function')::jsonb ->> 'schema' IS NULL THEN 'public'
|
||||
ELSE (definition::jsonb -> 'function')::jsonb ->> 'schema'
|
||||
END AS function_schema
|
||||
FROM hdb_catalog.hdb_computed_field
|
||||
);
|
||||
|
server/src-rsr/migrations/25_to_26.sql (new file, 106 lines)
@ -0,0 +1,106 @@
|
||||
CREATE TABLE hdb_catalog.hdb_computed_field
|
||||
(
|
||||
table_schema TEXT,
|
||||
table_name TEXT,
|
||||
computed_field_name TEXT,
|
||||
definition JSONB NOT NULL,
|
||||
comment TEXT NULL,
|
||||
|
||||
PRIMARY KEY (table_schema, table_name, computed_field_name),
|
||||
FOREIGN KEY (table_schema, table_name) REFERENCES hdb_catalog.hdb_table(table_schema, table_name) ON UPDATE CASCADE
|
||||
);
|
||||
|
||||
CREATE VIEW hdb_catalog.hdb_computed_field_function AS
|
||||
(
|
||||
SELECT
|
||||
table_schema,
|
||||
table_name,
|
||||
computed_field_name,
|
||||
CASE
|
||||
WHEN (definition::jsonb -> 'function')::jsonb ->> 'name' IS NULL THEN definition::jsonb ->> 'function'
|
||||
ELSE (definition::jsonb -> 'function')::jsonb ->> 'name'
|
||||
END AS function_name,
|
||||
CASE
|
||||
WHEN (definition::jsonb -> 'function')::jsonb ->> 'schema' IS NULL THEN 'public'
|
||||
ELSE (definition::jsonb -> 'function')::jsonb ->> 'schema'
|
||||
END AS function_schema
|
||||
FROM hdb_catalog.hdb_computed_field
|
||||
);
|
||||
|
||||
CREATE OR REPLACE VIEW hdb_catalog.hdb_function_agg AS
|
||||
(
|
||||
SELECT
|
||||
p.proname::text AS function_name,
|
||||
pn.nspname::text AS function_schema,
|
||||
pd.description,
|
||||
|
||||
CASE
|
||||
WHEN (p.provariadic = (0) :: oid) THEN false
|
||||
ELSE true
|
||||
END AS has_variadic,
|
||||
|
||||
CASE
|
||||
WHEN (
|
||||
(p.provolatile) :: text = ('i' :: character(1)) :: text
|
||||
) THEN 'IMMUTABLE' :: text
|
||||
WHEN (
|
||||
(p.provolatile) :: text = ('s' :: character(1)) :: text
|
||||
) THEN 'STABLE' :: text
|
||||
WHEN (
|
||||
(p.provolatile) :: text = ('v' :: character(1)) :: text
|
||||
) THEN 'VOLATILE' :: text
|
||||
ELSE NULL :: text
|
||||
END AS function_type,
|
||||
|
||||
pg_get_functiondef(p.oid) AS function_definition,
|
||||
|
||||
rtn.nspname::text as return_type_schema,
|
||||
rt.typname::text as return_type_name,
|
||||
rt.typtype::text as return_type_type,
|
||||
|
||||
p.proretset AS returns_set,
|
||||
( SELECT
|
||||
COALESCE(json_agg(
|
||||
json_build_object('schema', q."schema",
|
||||
'name', q."name",
|
||||
'type', q."type"
|
||||
)
|
||||
), '[]')
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
pt.typname AS "name",
|
||||
pns.nspname AS "schema",
|
||||
pt.typtype AS "type",
|
||||
pat.ordinality
|
||||
FROM
|
||||
unnest(
|
||||
COALESCE(p.proallargtypes, (p.proargtypes) :: oid [])
|
||||
) WITH ORDINALITY pat(oid, ordinality)
|
||||
LEFT JOIN pg_type pt ON ((pt.oid = pat.oid))
|
||||
LEFT JOIN pg_namespace pns ON (pt.typnamespace = pns.oid)
|
||||
ORDER BY pat.ordinality ASC
|
||||
) q
|
||||
) AS input_arg_types,
|
||||
to_json(COALESCE(p.proargnames, ARRAY [] :: text [])) AS input_arg_names,
|
||||
p.pronargdefaults AS default_args,
|
||||
p.oid::integer AS function_oid
|
||||
FROM
|
||||
pg_proc p
|
||||
JOIN pg_namespace pn ON (pn.oid = p.pronamespace)
|
||||
JOIN pg_type rt ON (rt.oid = p.prorettype)
|
||||
JOIN pg_namespace rtn ON (rtn.oid = rt.typnamespace)
|
||||
LEFT JOIN pg_description pd ON p.oid = pd.objoid
|
||||
WHERE
|
||||
pn.nspname :: text NOT LIKE 'pg_%'
|
||||
AND pn.nspname :: text NOT IN ('information_schema', 'hdb_catalog', 'hdb_views')
|
||||
AND (NOT EXISTS (
|
||||
SELECT
|
||||
1
|
||||
FROM
|
||||
pg_aggregate
|
||||
WHERE
|
||||
((pg_aggregate.aggfnoid) :: oid = p.oid)
|
||||
)
|
||||
)
|
||||
);
|
@ -5,7 +5,8 @@ SELECT
|
||||
t.description,
|
||||
coalesce(c.columns, '[]') as columns,
|
||||
coalesce(f.constraints, '[]') as constraints,
|
||||
coalesce(fk.fkeys, '[]') as foreign_keys
|
||||
coalesce(fk.fkeys, '[]') as foreign_keys,
|
||||
coalesce(cc.computed_fields, '[]') as computed_fields
|
||||
FROM
|
||||
(
|
||||
SELECT
|
||||
@ -102,6 +103,34 @@ FROM
|
||||
fk.table_schema = t.table_schema
|
||||
AND fk.table_name = t.table_name
|
||||
)
|
||||
LEFT OUTER JOIN (
|
||||
SELECT
|
||||
c.table_schema,
|
||||
c.table_name,
|
||||
json_agg(
|
||||
json_build_object(
|
||||
'name', c.computed_field_name,
|
||||
'function_meta',
|
||||
json_build_object(
|
||||
'function', json_build_object('name', c.function_name, 'schema', c.function_schema),
|
||||
'oid', hf_agg.function_oid,
|
||||
'type', hf_agg.function_type,
|
||||
'description', hf_agg.description
|
||||
)
|
||||
)
|
||||
) as computed_fields
|
||||
FROM hdb_catalog.hdb_function_agg hf_agg
|
||||
LEFT OUTER JOIN hdb_catalog.hdb_computed_field_function c
|
||||
ON ( hf_agg.function_name = c.function_name
|
||||
AND hf_agg.function_schema = c.function_schema
|
||||
)
|
||||
GROUP BY
|
||||
c.table_schema,
|
||||
c.table_name
|
||||
) cc ON (
|
||||
cc.table_schema = t.table_schema
|
||||
AND cc.table_name = t.table_name
|
||||
)
|
||||
WHERE
|
||||
t.table_schema NOT LIKE 'pg_%'
|
||||
AND t.table_schema <> 'information_schema'
|
||||
|
@ -0,0 +1,44 @@
|
||||
description: Query author with computed fields
|
||||
url: /v1/graphql
|
||||
status: 200
|
||||
query:
|
||||
query: |
|
||||
query {
|
||||
author{
|
||||
id
|
||||
first_name
|
||||
last_name
|
||||
full_name
|
||||
get_articles(args:{search:"Article"}){
|
||||
id
|
||||
title
|
||||
content
|
||||
author_id
|
||||
}
|
||||
}
|
||||
}
|
||||
response:
|
||||
data:
|
||||
author:
|
||||
- id: 1
|
||||
first_name: Roger
|
||||
last_name: Chris
|
||||
full_name: Roger Chris
|
||||
get_articles:
|
||||
- id: 1
|
||||
title: Article 1
|
||||
content: Content for Article 1
|
||||
author_id: 1
|
||||
- id: 2
|
||||
first_name: Daniel
|
||||
last_name:
|
||||
full_name: Daniel
|
||||
get_articles:
|
||||
- id: 2
|
||||
title: Article 2
|
||||
content: Content for Article 2
|
||||
author_id: 2
|
||||
- id: 3
|
||||
title: Article 3
|
||||
content: Content for Article 3
|
||||
author_id: 2
|
@ -0,0 +1,28 @@
|
||||
description: Query author with computed fields as user role
|
||||
url: /v1/graphql
|
||||
status: 200
|
||||
headers:
|
||||
X-Hasura-Role: user
|
||||
X-Hasura-User-Id: '1'
|
||||
query:
|
||||
query: |
|
||||
query {
|
||||
author{
|
||||
full_name
|
||||
get_articles(args:{search:"Article"}){
|
||||
id
|
||||
title
|
||||
content
|
||||
author_id
|
||||
}
|
||||
}
|
||||
}
|
||||
response:
|
||||
data:
|
||||
author:
|
||||
- full_name: Roger Chris
|
||||
get_articles:
|
||||
- id: 1
|
||||
title: Article 1
|
||||
content: Content for Article 1
|
||||
author_id: 1
|
@ -0,0 +1,97 @@
|
||||
type: bulk
|
||||
args:
|
||||
- type: run_sql
|
||||
args:
|
||||
sql: |
|
||||
CREATE TABLE author(
|
||||
id SERIAL PRIMARY KEY,
|
||||
first_name TEXT NOT NULL,
|
||||
last_name TEXT
|
||||
);
|
||||
|
||||
INSERT INTO author (first_name, last_name)
|
||||
VALUES ('Roger', 'Chris'), ('Daniel', NULL);
|
||||
|
||||
CREATE TABLE article(
|
||||
id SERIAL PRIMARY KEY,
|
||||
title TEXT NOT NULL,
|
||||
content TEXT,
|
||||
author_id INTEGER NOT NULL REFERENCES author(id)
|
||||
);
|
||||
|
||||
INSERT INTO article (title, content, author_id)
|
||||
VALUES
|
||||
('Article 1', 'Content for Article 1', 1),
|
||||
('Article 2', 'Content for Article 2', 2),
|
||||
('Article 3', 'Content for Article 3', 2);
|
||||
|
||||
CREATE FUNCTION fetch_articles(search text, author_row author)
|
||||
RETURNS SETOF article AS $$
|
||||
SELECT *
|
||||
FROM article
|
||||
WHERE
|
||||
( title ilike ('%' || search || '%')
|
||||
OR content ilike ('%' || search || '%')
|
||||
) AND author_id = author_row.id
|
||||
$$ LANGUAGE sql STABLE;
|
||||
|
||||
CREATE FUNCTION full_name(author)
|
||||
RETURNS TEXT AS $$
|
||||
DECLARE
|
||||
first_name text;
|
||||
last_name text;
|
||||
full_name text;
|
||||
BEGIN
|
||||
first_name := $1.first_name;
|
||||
last_name := $1.last_name;
|
||||
IF last_name IS NULL THEN
|
||||
full_name := first_name;
|
||||
ELSE full_name := first_name || ' ' || last_name;
|
||||
END IF;
|
||||
RETURN full_name;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql STABLE;
|
||||
|
||||
- type: track_table
|
||||
args:
|
||||
name: author
|
||||
schema: public
|
||||
|
||||
- type: track_table
|
||||
args:
|
||||
name: article
|
||||
schema: public
|
||||
|
||||
- type: add_computed_field
|
||||
args:
|
||||
table: author
|
||||
name: get_articles
|
||||
definition:
|
||||
function: fetch_articles
|
||||
table_argument: author_row
|
||||
|
||||
- type: add_computed_field
|
||||
args:
|
||||
table: author
|
||||
name: full_name
|
||||
definition:
|
||||
function: full_name
|
||||
|
||||
- type: create_select_permission
|
||||
args:
|
||||
table: article
|
||||
role: user
|
||||
permission:
|
||||
columns: '*'
|
||||
filter: {}
|
||||
- type: create_select_permission
|
||||
args:
|
||||
table: author
|
||||
role: user
|
||||
permission:
|
||||
columns: []
|
||||
filter:
|
||||
id: X-Hasura-User-Id
|
||||
computed_fields:
|
||||
- full_name
|
||||
- get_articles
|
@ -0,0 +1,10 @@
|
||||
type: bulk
|
||||
args:
|
||||
- type: run_sql
|
||||
args:
|
||||
sql: |
|
||||
DROP FUNCTION fetch_articles(text, author);
|
||||
DROP FUNCTION full_name(author);
|
||||
DROP TABLE article;
|
||||
DROP TABLE author;
|
||||
cascade: true
|
server/tests-py/queries/v1/computed_fields/add_and_drop.yaml (new file, 50 lines)
@ -0,0 +1,50 @@
|
||||
- description: Add a computed field for author table
|
||||
url: /v1/query
|
||||
status: 200
|
||||
query:
|
||||
type: add_computed_field
|
||||
args:
|
||||
table: author
|
||||
name: get_articles
|
||||
definition:
|
||||
function: fetch_articles
|
||||
table_argument: author_row
|
||||
response:
|
||||
message: success
|
||||
|
||||
- description: Drop a computed field of a non-existent table
|
||||
url: /v1/query
|
||||
status: 400
|
||||
query:
|
||||
type: drop_computed_field
|
||||
args:
|
||||
table: random
|
||||
name: get_articles
|
||||
response:
|
||||
path: "$.args.table"
|
||||
error: table "random" does not exist
|
||||
code: not-exists
|
||||
|
||||
- description: Drop a non-existent computed field
|
||||
url: /v1/query
|
||||
status: 400
|
||||
query:
|
||||
type: drop_computed_field
|
||||
args:
|
||||
table: author
|
||||
name: random
|
||||
response:
|
||||
path: "$.args.name"
|
||||
error: computed field "random" does not exist
|
||||
code: not-exists
|
||||
|
||||
- description: Drop a valid computed field
|
||||
url: /v1/query
|
||||
status: 200
|
||||
query:
|
||||
type: drop_computed_field
|
||||
args:
|
||||
table: author
|
||||
name: get_articles
|
||||
response:
|
||||
message: success
|
@ -0,0 +1,114 @@
- description: Try adding a computed field for invalid table
  url: /v1/query
  status: 400
  query:
    type: add_computed_field
    args:
      table: random
      name: full_name
      definition:
        function: full_name
  response:
    path: "$.args.table"
    error: table "random" does not exist
    code: not-exists

- description: Try adding computed field with existing column name
  url: /v1/query
  status: 400
  query:
    type: add_computed_field
    args:
      table: author
      name: first_name
      definition:
        function: full_name
  response:
    path: "$.args.name"
    error: column/relationship "first_name" of table "author" already exists
    code: already-exists

- description: Try adding computed field with invalid function
  url: /v1/query
  status: 400
  query:
    type: add_computed_field
    args:
      table: author
      name: full_name
      definition:
        function: random_function
  response:
    path: "$.args.definition.function"
    error: 'no such function exists in postgres : "random_function"'
    code: not-exists

- description: Try adding computed field with invalid table argument name
  url: /v1/query
  status: 400
  query:
    type: add_computed_field
    args:
      table: author
      name: full_name
      definition:
        function: full_name
        table_argument: random
  response:
    path: "$.args.definition"
    error: the computed field "full_name" cannot be added to table "author" because "random"
      is not an input argument of "full_name" function
    code: not-supported

- description: Try adding computed field with a volatile function
  url: /v1/query
  status: 400
  query:
    type: add_computed_field
    args:
      table: author
      name: get_articles
      definition:
        function: fetch_articles_volatile
        table_argument: random
  response:
    path: "$.args.definition"
    error: "the computed field \"get_articles\" cannot be added to table \"author\" for
      the following reasons: \n • the function \"fetch_articles_volatile\" is of type
      VOLATILE; cannot be added as a computed field\n • \"random\" is not an input argument
      of \"fetch_articles_volatile\" function\n"
    code: not-supported

- description: Try adding a computed field with a function with no input arguments
  url: /v1/query
  status: 400
  query:
    type: add_computed_field
    args:
      table: author
      name: hello_world
      definition:
        function: hello_world
  response:
    path: "$.args.definition"
    error: the computed field "hello_world" cannot be added to table "author" because
      the function "hello_world" has no input arguments
    code: not-supported

- description: Try adding a computed field with first argument as table argument
  url: /v1/query
  status: 400
  query:
    type: add_computed_field
    args:
      table: author
      name: get_articles
      definition:
        function: fetch_articles
  response:
    path: "$.args.definition"
    error: "the computed field \"get_articles\" cannot be added to table \"author\" for
      the following reasons: \n • first argument of the function \"fetch_articles\" is
      not COMPOSITE type\n • first argument of the function \"fetch_articles\" of type
      \"pg_catalog.text\" is not the table to which the computed field is being added\n"
    code: not-supported

@ -0,0 +1,133 @@
- description: Add a computed field get_articles to author table
  url: /v1/query
  status: 200
  query:
    type: add_computed_field
    args:
      table: author
      name: get_articles
      definition:
        function: fetch_articles
        table_argument: author_row
  response:
    message: success

- description: Add a computed field full_name to author table
  url: /v1/query
  status: 200
  query:
    type: add_computed_field
    args:
      table: author
      name: full_name
      definition:
        function: full_name
  response:
    message: success

- description: Create select permission with non existed computed fields
  url: /v1/query
  status: 400
  query:
    type: create_select_permission
    args:
      table: author
      role: user
      permission:
        columns: '*'
        computed_fields:
        - full_name
        - random
        filter: {}
  response:
    path: "$.args.computed_fields[1]"
    error: computed field "random" does not exist
    code: not-exists

- description: Create select permission with computed field whose return table's select permission is not defined
  url: /v1/query
  status: 400
  query:
    type: create_select_permission
    args:
      table: author
      role: user
      permission:
        columns: '*'
        computed_fields:
        - full_name
        - get_articles
        filter: {}
  response:
    path: "$.args.computed_fields[1]"
    error: computed field "get_articles" executes function "fetch_articles" which returns
      set of table "article"; select permission on "article" for role "user" does not
      exist
    code: permission-denied

- description: Create select permission on article table
  url: /v1/query
  status: 200
  query:
    type: create_select_permission
    args:
      table: article
      role: user
      permission:
        columns: '*'
        filter: {}
  response:
    message: success

- description: Create select permission with computed fields on author table
  url: /v1/query
  status: 200
  query:
    type: create_select_permission
    args:
      table: author
      role: user
      permission:
        columns: '*'
        computed_fields:
        - full_name
        - get_articles
        filter: {}
  response:
    message: success

- description: Try to drop a computed field defined in permission
  url: /v1/query
  status: 400
  query:
    type: drop_computed_field
    args:
      table: author
      name: get_articles
  response:
    path: "$.args"
    error: 'cannot drop due to the following dependent objects : permission author.user.select'
    code: dependency-error

- description: Drop a computed field with cascade
  url: /v1/query
  status: 200
  query:
    type: drop_computed_field
    args:
      table: author
      name: get_articles
      cascade: true
  response:
    message: success

- description: Drop a computed field
  url: /v1/query
  status: 200
  query:
    type: drop_computed_field
    args:
      table: author
      name: full_name
  response:
    message: success

116 server/tests-py/queries/v1/computed_fields/run_sql.yaml Normal file
@ -0,0 +1,116 @@
- description: Create a computed field get_articles to author table
  url: /v1/query
  status: 200
  query:
    type: add_computed_field
    args:
      table: author
      name: get_articles
      definition:
        function: fetch_articles
        table_argument: author_row
  response:
    message: success

- description: Try to alter the fetch_articles function name
  url: /v1/query
  status: 400
  query:
    type: run_sql
    args:
      sql: |
        ALTER FUNCTION fetch_articles(text, author) RENAME TO fetch_articles_renamed
  response:
    path: "$.args"
    error: 'cannot drop due to the following dependent objects : computed field author.get_articles'
    code: dependency-error

- description: Try to alter the fetch_articles function to VOLATILE
  url: /v1/query
  status: 400
  query:
    type: run_sql
    args:
      sql: |
        ALTER FUNCTION fetch_articles(text, author) VOLATILE
  response:
    path: "$.args"
    error: The type of function "fetch_articles" associated with computed field "get_articles"
      of table "author" is being altered to "VOLATILE"
    code: not-supported

- description: Try to create a new function with name fetch_articles (overloading)
  url: /v1/query
  status: 400
  query:
    type: run_sql
    args:
      sql: |
        CREATE OR REPLACE FUNCTION fetch_articles(search text, author_row author, integer)
        RETURNS SETOF article AS $$
          SELECT *
          FROM article
          WHERE
            ( title ilike ('%' || search || '%')
              OR content ilike ('%' || search || '%')
            ) AND author_id = author_row.id
          LIMIT $3
        $$ LANGUAGE sql STABLE;
  response:
    path: "$.args"
    error: The function "fetch_articles" associated with computed field"get_articles"
      of table "author" is being overloaded
    code: not-supported

- description: Drop the function fetch_articles and create a new function with the same name
  url: /v1/query
  status: 400
  query:
    type: run_sql
    args:
      sql: |
        DROP FUNCTION fetch_articles(text, author);
        CREATE FUNCTION fetch_articles(search text, author_row author, integer)
        RETURNS SETOF article AS $$
          SELECT *
          FROM article
          WHERE
            ( title ilike ('%' || search || '%')
              OR content ilike ('%' || search || '%')
            ) AND author_id = author_row.id
          LIMIT $3
        $$ LANGUAGE sql STABLE;
  response:
    path: "$.args"
    error: 'cannot drop due to the following dependent objects : computed field author.get_articles'
    code: dependency-error

- description: Safely alter the definition of function fetch_articles
  url: /v1/query
  status: 200
  query:
    type: run_sql
    args:
      sql: |
        CREATE OR REPLACE FUNCTION fetch_articles(search text, author_row author)
        RETURNS SETOF article AS $$
          SELECT *
          FROM article
          WHERE
            content ilike ('%' || search || '%')
            AND author_id = author_row.id
        $$ LANGUAGE sql STABLE;
  response:
    result_type: CommandOk
    result: null

- description: Drop computed field get_articles from author table
  url: /v1/query
  status: 200
  query:
    type: drop_computed_field
    args:
      table: author
      name: get_articles
  response:
    message: success

78 server/tests-py/queries/v1/computed_fields/setup.yaml Normal file
@ -0,0 +1,78 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      CREATE TABLE author(
          id SERIAL PRIMARY KEY,
          first_name TEXT NOT NULL,
          last_name TEXT
      );

      INSERT INTO author (first_name, last_name)
      VALUES ('Roger', 'Chris'), ('Daniel', NULL);

      CREATE TABLE article(
          id SERIAL PRIMARY KEY,
          title TEXT NOT NULL,
          content TEXT,
          author_id INTEGER NOT NULL REFERENCES author(id)
      );

      INSERT INTO article (title, content, author_id)
      VALUES
        ('Article 1', 'Content for Article 1', 1),
        ('Article 2', 'Content for Article 2', 2),
        ('Article 3', 'Content for Article 3', 2);

      CREATE FUNCTION fetch_articles(search text, author_row author)
      RETURNS SETOF article AS $$
        SELECT *
        FROM article
        WHERE
          ( title ilike ('%' || search || '%')
            OR content ilike ('%' || search || '%')
          ) AND author_id = author_row.id
      $$ LANGUAGE sql STABLE;

      CREATE FUNCTION fetch_articles_volatile(search text, author_row author)
      RETURNS SETOF article AS $$
        SELECT *
        FROM article
        WHERE
          ( title ilike ('%' || search || '%')
            OR content ilike ('%' || search || '%')
          ) AND author_id = author_row.id
      $$ LANGUAGE sql VOLATILE;

      CREATE FUNCTION full_name(author)
      RETURNS TEXT AS $$
      DECLARE
        first_name text;
        last_name text;
        full_name text;
      BEGIN
        first_name := $1.first_name;
        last_name := $1.last_name;
        IF last_name IS NULL THEN
          full_name := first_name;
        ELSE full_name := first_name || ' ' || last_name;
        END IF;
        RETURN full_name;
      END;
      $$ LANGUAGE plpgsql STABLE;

      CREATE FUNCTION hello_world()
      RETURNS TEXT AS $$
        SELECT 'Hello, World!'::text
      $$ LANGUAGE sql STABLE;

- type: track_table
  args:
    name: author
    schema: public

- type: track_table
  args:
    name: article
    schema: public

12 server/tests-py/queries/v1/computed_fields/teardown.yaml Normal file
@ -0,0 +1,12 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      DROP FUNCTION hello_world();
      DROP FUNCTION fetch_articles(text, author);
      DROP FUNCTION fetch_articles_volatile(text, author);
      DROP FUNCTION full_name(author);
      DROP TABLE article;
      DROP TABLE author;
    cascade: true

@ -505,6 +505,18 @@ class TestGraphQLQueryEnums(DefaultTestSelectQueries):
    def test_select_where_enum_eq_without_enum_table_visibility(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/select_where_enum_eq_without_enum_table_visibility.yaml', transport)


@pytest.mark.parametrize('transport', ['http', 'websocket'])
class TestGraphQLQueryComputedFields(DefaultTestSelectQueries):
    @classmethod
    def dir(cls):
        return 'queries/graphql_query/computed_fields'

    def test_computed_fields(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/computed_fields.yaml', transport)

    def test_computed_fields_permission(self, hge_ctx, transport):
        check_query_f(hge_ctx, self.dir() + '/computed_fields_permission.yaml', transport)


@pytest.mark.parametrize('transport', ['http', 'websocket'])
class TestGraphQLQueryCaching(DefaultTestSelectQueries):
    @classmethod
@ -655,3 +655,20 @@ class TestSetTableCustomFields(DefaultTestQueries):
    def test_alter_column(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/alter_column.yaml')


class TestComputedFields(DefaultTestQueries):
    @classmethod
    def dir(cls):
        return 'queries/v1/computed_fields'

    def test_add_computed_fields_errors(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/add_computed_field_errors.yaml')

    def test_add_and_drop(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/add_and_drop.yaml')

    def test_create_permissions(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/create_permissions.yaml')

    def test_run_sql(self, hge_ctx):
        check_query_f(hge_ctx, self.dir() + '/run_sql.yaml')