Data Connectors insert mutations support [GDC-609]

[GDC-609]: https://hasurahq.atlassian.net/browse/GDC-609?atlOrigin=eyJpIjoiNWRkNTljNzYxNjVmNDY3MDlhMDU5Y2ZhYzA5YTRkZjUiLCJwIjoiZ2l0aHViLWNvbS1KU1cifQ

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/7513
GitOrigin-RevId: cb401e3ed84c5b60ec59b63dc478a1162b360135
This commit is contained in:
Daniel Chambers 2023-01-17 11:33:56 +11:00 committed by hasura-bot
parent 06b284cf33
commit bfdeaf0334
30 changed files with 1689 additions and 867 deletions

View File

@ -1,6 +1,6 @@
{
"name": "@hasura/dc-api-types",
"version": "0.20.0",
"version": "0.21.0",
"description": "Hasura GraphQL Engine Data Connector Agent API types",
"author": "Hasura (https://github.com/hasura/graphql-engine)",
"license": "Apache-2.0",

View File

@ -2120,6 +2120,9 @@
},
"InsertMutationOperation": {
"properties": {
"post_insert_check": {
"$ref": "#/components/schemas/Expression"
},
"returning_fields": {
"additionalProperties": {
"$ref": "#/components/schemas/Field"
@ -2258,6 +2261,9 @@
},
"UpdateMutationOperation": {
"properties": {
"post_update_check": {
"$ref": "#/components/schemas/Expression"
},
"returning_fields": {
"additionalProperties": {
"$ref": "#/components/schemas/Field"

View File

@ -2,11 +2,13 @@
/* tslint:disable */
/* eslint-disable */
import type { Expression } from './Expression';
import type { Field } from './Field';
import type { RowObject } from './RowObject';
import type { TableName } from './TableName';
export type InsertMutationOperation = {
post_insert_check?: Expression;
/**
* The fields to return for the rows affected by this insert operation
*/

View File

@ -8,6 +8,7 @@ import type { RowUpdate } from './RowUpdate';
import type { TableName } from './TableName';
export type UpdateMutationOperation = {
post_update_check?: Expression;
/**
* The fields to return for the rows affected by this update operation
*/

View File

@ -24,7 +24,7 @@
},
"dc-api-types": {
"name": "@hasura/dc-api-types",
"version": "0.20.0",
"version": "0.21.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",
@ -1197,7 +1197,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"fastify": "^3.29.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -1781,7 +1781,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"fastify": "^4.4.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
@ -3125,7 +3125,7 @@
"version": "file:reference",
"requires": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/xml2js": "^0.4.11",
@ -3514,7 +3514,7 @@
"version": "file:sqlite",
"requires": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/sqlite3": "^3.1.8",

View File

@ -10,7 +10,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"fastify": "^3.29.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -44,7 +44,7 @@
}
},
"node_modules/@hasura/dc-api-types": {
"version": "0.20.0",
"version": "0.21.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"fastify": "^3.29.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",

View File

@ -10,7 +10,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"fastify": "^4.4.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
@ -54,7 +54,7 @@
"license": "MIT"
},
"node_modules/@hasura/dc-api-types": {
"version": "0.20.0",
"version": "0.21.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.20.0",
"@hasura/dc-api-types": "0.21.0",
"fastify-metrics": "^9.2.1",
"fastify": "^4.4.0",
"nanoid": "^3.3.4",

View File

@ -4,13 +4,12 @@ import { getSchema } from './schema';
import { explain, queryData } from './query';
import { getConfig, tryGetConfig } from './config';
import { capabilitiesResponse } from './capabilities';
import { QueryResponse, SchemaResponse, QueryRequest, CapabilitiesResponse, ExplainResponse, RawRequest, RawResponse, ErrorResponse } from '@hasura/dc-api-types';
import { QueryResponse, SchemaResponse, QueryRequest, CapabilitiesResponse, ExplainResponse, RawRequest, RawResponse, ErrorResponse, MutationRequest, MutationResponse } from '@hasura/dc-api-types';
import { connect } from './db';
import metrics from 'fastify-metrics';
import prometheus from 'prom-client';
import * as fs from 'fs'
import { runRawOperation } from './raw';
import { LOG_LEVEL, METRICS, PERMISSIVE_CORS, PRETTY_PRINT_LOGS } from './environment';
import { LOG_LEVEL, METRICS, MUTATIONS, PERMISSIVE_CORS, PRETTY_PRINT_LOGS } from './environment';
const port = Number(process.env.PORT) || 8100;
@ -144,6 +143,13 @@ server.post<{ Body: QueryRequest, Reply: ExplainResponse}>("/explain", async (re
return explain(config, sqlLogger, request.body);
});
// Mutation support is feature-flagged via the MUTATIONS environment variable.
// The route is registered so clients can reach POST /mutation, but actual
// mutation handling is not implemented yet — every request is logged and
// rejected with an error.
if (MUTATIONS) {
  server.post<{ Body: MutationRequest, Reply: MutationResponse }>("/mutation", async (request, _response) => {
    server.log.info({ headers: request.headers, query: request.body, }, "mutation.request");
    // Use `new Error(...)` — throwing the result of a bare `Error(...)` call
    // works at runtime but is non-idiomatic and trips linters.
    throw new Error("Mutations not yet implemented");
  });
}
server.get("/health", async (request, response) => {
const config = tryGetConfig(request);
response.type('application/json');
@ -164,13 +170,6 @@ server.get("/health", async (request, response) => {
}
});
server.get("/swagger.json", async (request, response) => {
fs.readFile('src/types/agent.openapi.json', (err, fileBuffer) => {
response.type('application/json');
response.send(err || fileBuffer)
})
})
server.get("/", async (request, response) => {
response.type('text/html');
return `<!DOCTYPE html>
@ -187,9 +186,9 @@ server.get("/", async (request, response) => {
<li><a href="/capabilities">GET /capabilities - Capabilities Metadata</a>
<li><a href="/schema">GET /schema - Agent Schema</a>
<li><a href="/query">POST /query - Query Handler</a>
<li><a href="/mutation">POST /mutation - Mutation Handler</a>
<li><a href="/raw">POST /raw - Raw Query Handler</a>
<li><a href="/health">GET /health - Healthcheck</a>
<li><a href="/swagger.json">GET /swagger.json - Swagger JSON</a>
<li><a href="/metrics">GET /metrics - Prometheus formatted metrics</a>
</ul>
</body>

View File

@ -654,7 +654,9 @@ library
, Hasura.Backends.DataConnector.Adapter.Types.Mutations
, Hasura.Backends.DataConnector.Agent.Client
, Hasura.Backends.DataConnector.Logging
, Hasura.Backends.DataConnector.Plan
, Hasura.Backends.DataConnector.Plan.Common
, Hasura.Backends.DataConnector.Plan.MutationPlan
, Hasura.Backends.DataConnector.Plan.QueryPlan
-- Exposed for benchmark:
, Hasura.Cache.Bounded

View File

@ -100,6 +100,7 @@ library
Test.DataConnector.MockAgent.BasicQuerySpec
Test.DataConnector.MockAgent.CustomScalarsSpec
Test.DataConnector.MockAgent.ErrorSpec
Test.DataConnector.MockAgent.InsertMutationsSpec
Test.DataConnector.MockAgent.MetadataApiSpec
Test.DataConnector.MockAgent.QueryRelationshipsSpec
Test.DataConnector.MockAgent.TransformedConfigurationSpec

View File

@ -0,0 +1,250 @@
module Test.DataConnector.MockAgent.InsertMutationsSpec
( spec,
)
where
import Data.Aeson qualified as Aeson
import Data.ByteString (ByteString)
import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty qualified as NE
import Harness.Backend.DataConnector.Mock (AgentRequest (..), MockRequestResults (..), mockAgentTest, mockMutationResponse)
import Harness.Backend.DataConnector.Mock qualified as Mock
import Harness.Quoter.Graphql (graphql)
import Harness.Quoter.Yaml (yaml)
import Harness.Test.BackendType qualified as BackendType
import Harness.Test.Fixture qualified as Fixture
import Harness.TestEnvironment (GlobalTestEnvironment, TestEnvironment)
import Harness.Yaml
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Prelude
import Test.Hspec
--------------------------------------------------------------------------------
-- | Entry point for the insert-mutations mock-agent tests. Runs 'tests'
-- against the mock data connector backend, installing 'sourceMetadata'
-- (tables, relationships and permissions) as part of each fixture's
-- setup/teardown.
spec :: SpecWith GlobalTestEnvironment
spec =
  Fixture.runWithLocalTestEnvironment
    ( NE.fromList
        [ (Fixture.fixture $ Fixture.Backend Mock.backendTypeMetadata)
            { Fixture.mkLocalTestEnvironment = Mock.mkLocalTestEnvironment,
              Fixture.setupTeardown = \(testEnv, mockEnv) ->
                [Mock.setupAction sourceMetadata Mock.agentConfig (testEnv, mockEnv)]
            }
        ]
    )
    tests
--------------------------------------------------------------------------------
-- | The role used throughout these tests; 'sourceMetadata' grants it the
-- select and insert permissions (including the @X-Hasura-ArtistId@ check)
-- that the mutations under test must respect.
testRoleName :: ByteString
testRoleName = "test-role"
sourceMetadata :: Aeson.Value
sourceMetadata =
let source = BackendType.backendSourceName Mock.backendTypeMetadata
backendType = BackendType.backendTypeString Mock.backendTypeMetadata
in [yaml|
name : *source
kind: *backendType
tables:
- table: [Album]
object_relationships:
- name: Artist
using:
manual_configuration:
remote_table: [Artist]
column_mapping:
ArtistId: ArtistId
select_permissions:
- role: *testRoleName
permission:
columns:
- AlbumId
- ArtistId
- Title
filter: {}
insert_permissions:
- role: *testRoleName
permission:
check:
ArtistId: "X-Hasura-ArtistId"
set:
ArtistId: "X-Hasura-ArtistId"
columns:
- AlbumId
- ArtistId
- Title
- table: [Artist]
select_permissions:
- role: *testRoleName
permission:
columns:
- ArtistId
- Name
filter: {}
configuration: {}
|]
--------------------------------------------------------------------------------
tests :: Fixture.Options -> SpecWith (TestEnvironment, Mock.MockAgentEnvironment)
tests _opts = do
mockAgentTest "insert multiple rows with insert permissions" $ \performGraphqlRequest -> do
let headers = [("X-Hasura-ArtistId", "2"), ("X-Hasura-Role", testRoleName)]
let graphqlRequest =
[graphql|
mutation InsertMutation {
insert_Album(objects: [
{AlbumId: 9001, Title: "Super Mega Rock"},
{AlbumId: 9002, Title: "Accept This"}
]) {
insertCount: affected_rows
insertedRows: returning {
AlbumId
Title
Artist {
ArtistId
Name
}
}
}
}
|]
let mockAgentResponse =
API.MutationResponse
[ API.MutationOperationResults
{ API._morAffectedRows = 2,
API._morReturning =
Just
[ HashMap.fromList
[ (API.FieldName "insertedRows_AlbumId", API.mkColumnFieldValue $ Aeson.Number 9001),
(API.FieldName "insertedRows_Title", API.mkColumnFieldValue $ Aeson.String "Super Mega Rock"),
( API.FieldName "insertedRows_Artist",
API.mkRelationshipFieldValue $
rowsResponse
[ [ (API.FieldName "ArtistId", API.mkColumnFieldValue $ Aeson.Number 2),
(API.FieldName "Name", API.mkColumnFieldValue $ Aeson.String "Accept")
]
]
)
],
HashMap.fromList
[ (API.FieldName "insertedRows_AlbumId", API.mkColumnFieldValue $ Aeson.Number 9002),
(API.FieldName "insertedRows_Title", API.mkColumnFieldValue $ Aeson.String "Accept This"),
( API.FieldName "insertedRows_Artist",
API.mkRelationshipFieldValue $
rowsResponse
[ [ (API.FieldName "ArtistId", API.mkColumnFieldValue $ Aeson.Number 2),
(API.FieldName "Name", API.mkColumnFieldValue $ Aeson.String "Accept")
]
]
)
]
]
}
]
let mockConfig = Mock.chinookMock & mockMutationResponse mockAgentResponse
MockRequestResults {..} <- performGraphqlRequest mockConfig headers graphqlRequest
_mrrGraphqlResponse
`shouldBeYaml` [yaml|
data:
insert_Album:
insertCount: 2
insertedRows:
- AlbumId: 9001
Title: Super Mega Rock
Artist:
ArtistId: 2
Name: Accept
- AlbumId: 9002
Title: Accept This
Artist:
ArtistId: 2
Name: Accept
|]
let expectedRequest =
API.MutationRequest
{ API._mrTableRelationships =
[ API.TableRelationships
{ API._trSourceTable = API.TableName ("Album" :| []),
API._trRelationships =
HashMap.fromList
[ ( API.RelationshipName "Artist",
API.Relationship
{ API._rTargetTable = API.TableName ("Artist" :| []),
API._rRelationshipType = API.ObjectRelationship,
API._rColumnMapping = HashMap.fromList [(API.ColumnName "ArtistId", API.ColumnName "ArtistId")]
}
)
]
}
],
API._mrInsertSchema =
[ API.TableInsertSchema
{ API._tisTable = API.TableName ("Album" :| []),
API._tisFields =
HashMap.fromList
[ (API.FieldName "AlbumId", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "AlbumId") (API.ScalarType "number")),
(API.FieldName "ArtistId", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "ArtistId") (API.ScalarType "number")),
(API.FieldName "Title", API.ColumnInsert $ API.ColumnInsertSchema (API.ColumnName "Title") (API.ScalarType "string"))
]
}
],
API._mrOperations =
[ API.InsertOperation $
API.InsertMutationOperation
{ API._imoTable = API.TableName ("Album" :| []),
API._imoRows =
[ API.RowObject $
HashMap.fromList
[ (API.FieldName "AlbumId", API.mkColumnInsertFieldValue $ Aeson.Number 9001),
(API.FieldName "ArtistId", API.mkColumnInsertFieldValue $ Aeson.Number 2),
(API.FieldName "Title", API.mkColumnInsertFieldValue $ Aeson.String "Super Mega Rock")
],
API.RowObject $
HashMap.fromList
[ (API.FieldName "AlbumId", API.mkColumnInsertFieldValue $ Aeson.Number 9002),
(API.FieldName "ArtistId", API.mkColumnInsertFieldValue $ Aeson.Number 2),
(API.FieldName "Title", API.mkColumnInsertFieldValue $ Aeson.String "Accept This")
]
],
API._imoPostInsertCheck =
Just $
API.ApplyBinaryComparisonOperator
API.Equal
(API.ComparisonColumn API.CurrentTable (API.ColumnName "ArtistId") $ API.ScalarType "number")
(API.ScalarValue (Aeson.Number 2) $ API.ScalarType "number"),
API._imoReturningFields =
HashMap.fromList
[ (API.FieldName "insertedRows_AlbumId", API.ColumnField (API.ColumnName "AlbumId") (API.ScalarType "number")),
(API.FieldName "insertedRows_Title", API.ColumnField (API.ColumnName "Title") (API.ScalarType "string")),
( API.FieldName "insertedRows_Artist",
API.RelField
( API.RelationshipField
(API.RelationshipName "Artist")
API.Query
{ _qFields =
Just $
HashMap.fromList
[ (API.FieldName "ArtistId", API.ColumnField (API.ColumnName "ArtistId") $ API.ScalarType "number"),
(API.FieldName "Name", API.ColumnField (API.ColumnName "Name") $ API.ScalarType "string")
],
_qAggregates = Nothing,
_qLimit = Nothing,
_qOffset = Nothing,
_qWhere = Nothing,
_qOrderBy = Nothing
}
)
)
]
}
]
}
_mrrRecordedRequest `shouldBe` Just (Mutation expectedRequest)
-- | Builds a 'API.QueryResponse' carrying the given rows, converting each
-- row's field list into a map. No aggregates are included.
rowsResponse :: [[(API.FieldName, API.FieldValue)]] -> API.QueryResponse
rowsResponse rows =
  let rowMaps = map HashMap.fromList rows
   in API.QueryResponse (Just rowMaps) Nothing

View File

@ -59,7 +59,7 @@ sourceMetadata =
tests :: Fixture.Options -> SpecWith (TestEnvironment, Mock.MockAgentEnvironment)
tests opts = do
describe "MetadataAPI Mock Tests" $ do
it "Should peform a template transform when calling _get_source_tables" $ \(testEnvironment, Mock.MockAgentEnvironment {maeQueryConfig}) -> do
it "Should peform a template transform when calling _get_source_tables" $ \(testEnvironment, Mock.MockAgentEnvironment {maeRecordedRequestConfig}) -> do
let sortYamlArray :: Aeson.Value -> IO Aeson.Value
sortYamlArray (Aeson.Array a) = pure $ Aeson.Array (Vector.fromList (sort (Vector.toList a)))
sortYamlArray _ = fail "Should return Array"
@ -67,8 +67,8 @@ tests opts = do
case BackendType.backendSourceName <$> getBackendTypeConfig testEnvironment of
Nothing -> pendingWith "Backend not found for testEnvironment"
Just sourceString -> do
queryConfig <- IORef.readIORef maeQueryConfig
IORef.writeIORef maeQueryConfig Nothing
queryConfig <- IORef.readIORef maeRecordedRequestConfig
IORef.writeIORef maeRecordedRequestConfig Nothing
queryConfig `shouldBe` Just (Config $ KM.fromList [("DEBUG", Aeson.Object (KM.fromList [("test", Aeson.String "data")]))])

View File

@ -17,6 +17,7 @@ module Hasura.Backends.DataConnector.API
capabilitiesCase,
schemaCase,
queryCase,
mutationCase,
openApiSchema,
Routes (..),
apiClient,
@ -99,6 +100,19 @@ queryCase defaultAction queryAction errorAction union = do
type QueryResponses = '[V0.QueryResponse, V0.ErrorResponse, V0.ErrorResponse400]
-- | This function defines a central place to ensure that all cases are covered for mutation and error responses.
-- When additional responses are added to the Union, this should be updated to ensure that all responses have been considered.
-- | Dispatches on whichever response type is actually present in a
-- @Union MutationResponses@: 'mutationAction' for a successful
-- 'MutationResponse', 'errorAction' for either flavour of error response
-- (the 'WithStatus' wrapper on the 400-status variant is stripped first),
-- and 'defaultAction' when none of the known types match.
mutationCase :: a -> (MutationResponse -> a) -> (ErrorResponse -> a) -> Union MutationResponses -> a
mutationCase defaultAction mutationAction errorAction union = do
  -- Probe the union once for each member type.
  let mutationM = matchUnion @MutationResponse union
  let errorM = matchUnion @ErrorResponse union
  let errorM400 = matchUnion @ErrorResponse400 union
  -- Match order gives a successful response priority, then errors.
  case (mutationM, errorM, errorM400) of
    (Nothing, Nothing, Nothing) -> defaultAction
    (Just c, _, _) -> mutationAction c
    (_, Just e, _) -> errorAction e
    (_, _, Just (WithStatus e)) -> errorAction e

-- | The possible response types for mutation requests.
type MutationResponses = '[V0.MutationResponse, V0.ErrorResponse, V0.ErrorResponse400]
type QueryApi =

View File

@ -214,6 +214,8 @@ instance HasCodec MutationOperation where
.= _imoTable
<*> requiredField "rows" "The rows to insert into the table"
.= _imoRows
<*> optionalFieldOrNull "post_insert_check" "An expression that all inserted rows must match after they have been inserted, otherwise the changes must be reverted"
.= _imoPostInsertCheck
<*> optionalFieldOrNullWithOmittedDefault "returning_fields" mempty "The fields to return for the rows affected by this insert operation"
.= _imoReturningFields
@ -226,6 +228,8 @@ instance HasCodec MutationOperation where
.= _umoWhere
<*> requiredField "updates" "The updates to make to the matched rows in the table"
.= _umoUpdates
<*> optionalFieldOrNull "post_update_check" "An expression that all updated rows must match after they have been updated, otherwise the changes must be reverted"
.= _umoPostUpdateCheck
<*> optionalFieldOrNullWithOmittedDefault "returning_fields" mempty "The fields to return for the rows affected by this update operation"
.= _umoReturningFields
@ -258,6 +262,9 @@ data InsertMutationOperation = InsertMutationOperation
_imoTable :: API.V0.TableName,
-- | The rows to insert into the table
_imoRows :: [RowObject],
-- | An expression that all inserted rows must match after they have been inserted,
-- otherwise the changes must be reverted
_imoPostInsertCheck :: Maybe API.V0.Expression,
-- | The fields to return that represent a projection over the set of rows inserted
-- after they are inserted (after insertion they include calculated columns, relations etc)
_imoReturningFields :: HashMap API.V0.FieldName API.V0.Field
@ -370,6 +377,9 @@ data UpdateMutationOperation = UpdateMutationOperation
_umoWhere :: Maybe API.V0.Expression,
-- | The updates to perform against each row
_umoUpdates :: [RowUpdate],
-- | An expression that all updated rows must match after they have been updated,
-- otherwise the changes must be reverted
_umoPostUpdateCheck :: Maybe API.V0.Expression,
-- | The fields to return that represent a projection over the set of rows updated
-- after they are updated (ie. with their updated values)
_umoReturningFields :: HashMap API.V0.FieldName API.V0.Field

View File

@ -19,6 +19,11 @@ module Harness.Backend.DataConnector.Mock
mockAgentPort,
defaultTestCase,
chinookMock,
AgentRequest (..),
MockRequestResults (..),
mockQueryResponse,
mockMutationResponse,
mockAgentTest,
)
where
@ -40,14 +45,14 @@ import Harness.TestEnvironment (TestEnvironment (..))
import Harness.Yaml (shouldReturnYaml)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Prelude
import Test.Hspec (shouldBe)
import Test.Hspec (Arg, Expectation, SpecWith, it, shouldBe)
--------------------------------------------------------------------------------
backendTypeMetadata :: BackendType.BackendTypeConfig
backendTypeMetadata =
BackendType.BackendTypeConfig
{ backendType = BackendType.DataConnectorReference,
{ backendType = BackendType.DataConnectorMock,
backendSourceName = "mock",
backendCapabilities = Nothing,
backendTypeString = "mock",
@ -125,18 +130,18 @@ dataconnector:
data MockAgentEnvironment = MockAgentEnvironment
{ maeConfig :: I.IORef MockConfig,
maeQuery :: I.IORef (Maybe API.QueryRequest),
maeRecordedRequest :: I.IORef (Maybe AgentRequest),
maeThread :: Async (),
maeQueryConfig :: I.IORef (Maybe API.Config)
maeRecordedRequestConfig :: I.IORef (Maybe API.Config)
}
-- | Create the 'I.IORef's and launch the servant mock agent.
mkLocalTestEnvironment :: TestEnvironment -> Managed MockAgentEnvironment
mkLocalTestEnvironment _ = mkTestResource do
maeConfig <- I.newIORef chinookMock
maeQuery <- I.newIORef Nothing
maeQueryConfig <- I.newIORef Nothing
maeThread <- Async.async $ runMockServer maeConfig maeQuery maeQueryConfig
maeRecordedRequest <- I.newIORef Nothing
maeRecordedRequestConfig <- I.newIORef Nothing
maeThread <- Async.async $ runMockServer maeConfig maeRecordedRequest maeRecordedRequestConfig
pure $
AcquiredResource
{ resourceValue = MockAgentEnvironment {..},
@ -201,13 +206,49 @@ runTest opts TestCase {..} (testEnvironment, MockAgentEnvironment {..}) = do
_then
-- Read the logged 'API.QueryRequest' from the Agent
query <- I.readIORef maeQuery
I.writeIORef maeQuery Nothing
query <- (>>= \case Query query -> Just query; _ -> Nothing) <$> I.readIORef maeRecordedRequest
I.writeIORef maeRecordedRequest Nothing
-- Read the logged 'API.Config' from the Agent
queryConfig <- I.readIORef maeQueryConfig
I.writeIORef maeQueryConfig Nothing
queryConfig <- I.readIORef maeRecordedRequestConfig
I.writeIORef maeRecordedRequestConfig Nothing
-- Assert that the 'API.QueryRequest' was constructed how we expected.
for_ _whenQuery ((query `shouldBe`) . Just)
for_ _whenConfig ((queryConfig `shouldBe`) . Just)
-- | Everything captured from one GraphQL request issued through the mock
-- agent: the engine's GraphQL response plus whatever request/config the
-- agent recorded while serving it.
data MockRequestResults = MockRequestResults
  { -- The GraphQL response returned to the test
    _mrrGraphqlResponse :: Aeson.Value,
    -- The request (if any) the mock agent received
    _mrrRecordedRequest :: Maybe AgentRequest,
    -- The agent configuration (if any) that accompanied that request
    _mrrRecordedRequestConfig :: Maybe API.Config
  }
-- | Overrides the mock agent's canned query response: every query request
-- will succeed with the given 'API.QueryResponse'.
mockQueryResponse :: API.QueryResponse -> MockConfig -> MockConfig
mockQueryResponse queryResponse mockConfig =
  mockConfig {_queryResponse = \_ -> Right queryResponse}
-- | Overrides the mock agent's canned mutation response: every mutation
-- request will succeed with the given 'API.MutationResponse'.
mockMutationResponse :: API.MutationResponse -> MockConfig -> MockConfig
mockMutationResponse mutationResponse mockConfig =
  mockConfig {_mutationResponse = \_ -> Right mutationResponse}
-- | Defines an hspec test case whose body is handed a function for issuing
-- a GraphQL request against the mock agent with a custom 'MockConfig' and
-- request headers, returning the captured 'MockRequestResults'.
mockAgentTest :: String -> ((MockConfig -> RequestHeaders -> Aeson.Value -> IO MockRequestResults) -> Expectation) -> SpecWith (Arg ((TestEnvironment, MockAgentEnvironment) -> Expectation))
mockAgentTest name testBody =
  it name $ \env -> testBody (postMockAgentGraphqlWithHeaders env)
-- | Configures the mock agent with the given 'MockConfig', clears any
-- previously recorded request state, performs the GraphQL request with the
-- given headers, and returns the response together with whatever request
-- and config the agent recorded while serving it.
postMockAgentGraphqlWithHeaders :: (TestEnvironment, MockAgentEnvironment) -> MockConfig -> RequestHeaders -> Aeson.Value -> IO MockRequestResults
postMockAgentGraphqlWithHeaders (testEnvironment, MockAgentEnvironment {..}) mockConfig requestHeaders graphqlRequest = do
  -- Set the Agent with the 'MockConfig'
  I.writeIORef maeConfig mockConfig
  -- Reset recording state so results from a previous request cannot leak in
  I.writeIORef maeRecordedRequest Nothing
  I.writeIORef maeRecordedRequestConfig Nothing
  -- Perform GraphQL request
  graphqlResponse <- GraphqlEngine.postGraphqlWithHeaders testEnvironment requestHeaders graphqlRequest
  -- Capture what the agent recorded during the request
  recordedRequest <- I.readIORef maeRecordedRequest
  recordedRequestConfig <- I.readIORef maeRecordedRequestConfig
  pure $ MockRequestResults graphqlResponse recordedRequest recordedRequestConfig

View File

@ -2,7 +2,8 @@
-- | Mock Agent Warp server backend
module Harness.Backend.DataConnector.Mock.Server
( MockConfig (..),
( AgentRequest (..),
MockConfig (..),
chinookMock,
mockAgentPort,
runMockServer,
@ -23,13 +24,17 @@ import Servant
--------------------------------------------------------------------------------
-- Note: The _queryResponse and _mutationResponse fields allow mock errors at
-- present. This can be extended to other responses at a later point if required.
--
-- | The kinds of agent requests the mock server records so tests can assert
-- on exactly what graphql-engine sent.
data AgentRequest
  = Schema
  | Query API.QueryRequest
  | Mutation API.MutationRequest
  deriving stock (Eq, Show)
data MockConfig = MockConfig
{ _capabilitiesResponse :: API.CapabilitiesResponse,
_schemaResponse :: API.SchemaResponse,
_queryResponse :: API.QueryRequest -> Either API.ErrorResponse API.QueryResponse
_queryResponse :: API.QueryRequest -> Either API.ErrorResponse API.QueryResponse,
_mutationResponse :: API.MutationRequest -> Either API.ErrorResponse API.MutationResponse
}
mkTableName :: Text -> API.TableName
@ -43,7 +48,15 @@ capabilities =
API.Capabilities
{ API._cDataSchema = API.defaultDataSchemaCapabilities,
API._cQueries = Just API.QueryCapabilities,
API._cMutations = Nothing,
API._cMutations =
Just $
API.MutationCapabilities
{ API._mcInsertCapabilities = Just API.InsertCapabilities {API._icSupportsNestedInserts = False},
API._mcUpdateCapabilities = Just API.UpdateCapabilities,
API._mcDeleteCapabilities = Just API.DeleteCapabilities,
API._mcAtomicitySupportLevel = Just API.HeterogeneousOperationsAtomicity,
API._mcReturningCapabilities = Just API.ReturningCapabilities
},
API._cSubscriptions = Nothing,
API._cScalarTypes = scalarTypesCapabilities,
API._cRelationships = Just API.RelationshipCapabilities {},
@ -744,7 +757,8 @@ chinookMock =
MockConfig
{ _capabilitiesResponse = capabilities,
_schemaResponse = schema,
_queryResponse = \_ -> Right $ API.QueryResponse (Just []) Nothing
_queryResponse = \_request -> Right $ API.QueryResponse (Just []) Nothing,
_mutationResponse = \_request -> Right $ API.MutationResponse []
}
--------------------------------------------------------------------------------
@ -754,29 +768,35 @@ mockCapabilitiesHandler mcfg = liftIO $ do
cfg <- I.readIORef mcfg
pure $ inject $ SOP.I $ _capabilitiesResponse cfg
mockSchemaHandler :: I.IORef MockConfig -> I.IORef (Maybe API.Config) -> API.SourceName -> API.Config -> Handler (Union API.SchemaResponses)
mockSchemaHandler mcfg mQueryConfig _sourceName queryConfig = liftIO $ do
mockSchemaHandler :: I.IORef MockConfig -> I.IORef (Maybe AgentRequest) -> I.IORef (Maybe API.Config) -> API.SourceName -> API.Config -> Handler (Union API.SchemaResponses)
mockSchemaHandler mcfg mRecordedRequest mRecordedRequestConfig _sourceName requestConfig = liftIO $ do
cfg <- I.readIORef mcfg
I.writeIORef mQueryConfig (Just queryConfig)
I.writeIORef mRecordedRequest (Just Schema)
I.writeIORef mRecordedRequestConfig (Just requestConfig)
pure $ inject $ SOP.I $ _schemaResponse cfg
mockQueryHandler :: I.IORef MockConfig -> I.IORef (Maybe API.QueryRequest) -> I.IORef (Maybe API.Config) -> API.SourceName -> API.Config -> API.QueryRequest -> Handler (Union API.QueryResponses)
mockQueryHandler mcfg mquery mQueryCfg _sourceName queryConfig query = liftIO $ do
mockQueryHandler :: I.IORef MockConfig -> I.IORef (Maybe AgentRequest) -> I.IORef (Maybe API.Config) -> API.SourceName -> API.Config -> API.QueryRequest -> Handler (Union API.QueryResponses)
mockQueryHandler mcfg mRecordedRequest mRecordedRequestConfig _sourceName requestConfig query = liftIO $ do
handler <- fmap _queryResponse $ I.readIORef mcfg
I.writeIORef mquery (Just query)
I.writeIORef mQueryCfg (Just queryConfig)
I.writeIORef mRecordedRequest (Just $ Query query)
I.writeIORef mRecordedRequestConfig (Just requestConfig)
case handler query of
Left e -> pure $ inject $ SOP.I e
Right r -> pure $ inject $ SOP.I r
Left err -> pure $ inject $ SOP.I err
Right response -> pure $ inject $ SOP.I response
-- | Servant handler for mutation requests against the mock agent. Records
-- the incoming request and agent config into the supplied 'I.IORef's so
-- tests can inspect them, then answers with whatever the 'MockConfig's
-- '_mutationResponse' function produces (either an error or a response).
mockMutationHandler :: I.IORef MockConfig -> I.IORef (Maybe AgentRequest) -> I.IORef (Maybe API.Config) -> API.SourceName -> API.Config -> API.MutationRequest -> Handler (Union API.MutationResponses)
mockMutationHandler mcfg mRecordedRequest mRecordedRequestConfig _sourceName requestConfig mutation = liftIO $ do
  handler <- fmap _mutationResponse $ I.readIORef mcfg
  -- Record what the agent received for later assertions by the test
  I.writeIORef mRecordedRequest (Just $ Mutation mutation)
  I.writeIORef mRecordedRequestConfig (Just requestConfig)
  case handler mutation of
    Left err -> pure $ inject $ SOP.I err
    Right response -> pure $ inject $ SOP.I response
-- Returns an empty explain response for now
explainHandler :: API.SourceName -> API.Config -> API.QueryRequest -> Handler API.ExplainResponse
explainHandler _sourceName _queryConfig _query = pure $ API.ExplainResponse [] ""
-- Returns an empty mutation response for now
mutationsHandler :: API.SourceName -> API.Config -> API.MutationRequest -> Handler (Union API.MutationResponses)
mutationsHandler _ _ _ = pure . inject . SOP.I $ API.MutationResponse []
healthcheckHandler :: Maybe API.SourceName -> Maybe API.Config -> Handler NoContent
healthcheckHandler _sourceName _config = pure NoContent
@ -786,13 +806,13 @@ metricsHandler = pure "# NOTE: Metrics would go here."
rawHandler :: API.SourceName -> API.Config -> API.RawRequest -> Handler API.RawResponse
rawHandler _ _ _ = pure $ API.RawResponse [] -- NOTE: Raw query response would go here.
dcMockableServer :: I.IORef MockConfig -> I.IORef (Maybe API.QueryRequest) -> I.IORef (Maybe API.Config) -> Server API.Api
dcMockableServer mcfg mquery mQueryConfig =
dcMockableServer :: I.IORef MockConfig -> I.IORef (Maybe AgentRequest) -> I.IORef (Maybe API.Config) -> Server API.Api
dcMockableServer mcfg mRecordedRequest mRecordedRequestConfig =
mockCapabilitiesHandler mcfg
:<|> mockSchemaHandler mcfg mQueryConfig
:<|> mockQueryHandler mcfg mquery mQueryConfig
:<|> mockSchemaHandler mcfg mRecordedRequest mRecordedRequestConfig
:<|> mockQueryHandler mcfg mRecordedRequest mRecordedRequestConfig
:<|> explainHandler
:<|> mutationsHandler
:<|> mockMutationHandler mcfg mRecordedRequest mRecordedRequestConfig
:<|> healthcheckHandler
:<|> metricsHandler
:<|> rawHandler
@ -800,7 +820,7 @@ dcMockableServer mcfg mquery mQueryConfig =
mockAgentPort :: Warp.Port
mockAgentPort = 65006
runMockServer :: I.IORef MockConfig -> I.IORef (Maybe API.QueryRequest) -> I.IORef (Maybe API.Config) -> IO ()
runMockServer mcfg mquery mQueryConfig = do
let app = serve (Proxy :: Proxy API.Api) $ dcMockableServer mcfg mquery mQueryConfig
runMockServer :: I.IORef MockConfig -> I.IORef (Maybe AgentRequest) -> I.IORef (Maybe API.Config) -> IO ()
runMockServer mcfg mRecordedRequest mRecordedRequestConfig = do
let app = serve (Proxy :: Proxy API.Api) $ dcMockableServer mcfg mRecordedRequest mRecordedRequestConfig
Warp.run mockAgentPort app

View File

@ -124,12 +124,13 @@ bqDBMutationPlan ::
( MonadError E.QErr m
) =>
UserInfo ->
Env.Environment ->
Options.StringifyNumbers ->
SourceName ->
SourceConfig 'BigQuery ->
MutationDB 'BigQuery Void (UnpreparedValue 'BigQuery) ->
m (DBStepInfo 'BigQuery)
bqDBMutationPlan _userInfo _stringifyNum _sourceName _sourceConfig _mrf =
bqDBMutationPlan _userInfo _environment _stringifyNum _sourceName _sourceConfig _mrf =
throw500 "mutations are not supported in BigQuery; this should be unreachable"
-- explain

View File

@ -1,22 +1,26 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.DataConnector.Adapter.Execute
(
( DataConnectorPreparedQuery (..),
encodePreparedQueryToJsonText,
)
where
--------------------------------------------------------------------------------
import Data.Aeson qualified as J
import Data.ByteString.Lazy qualified as BL
import Data.Environment qualified as Env
import Data.Text.Encoding qualified as TE
import Data.Text.Extended (toTxt)
import Hasura.Backends.DataConnector.API (errorResponseSummary, queryCase)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.API.V0.ErrorResponse (ErrorResponse (..))
import Hasura.Backends.DataConnector.Adapter.ConfigTransform (transformSourceConfig)
import Hasura.Backends.DataConnector.Adapter.Types (SourceConfig (..))
import Hasura.Backends.DataConnector.Agent.Client (AgentClientT)
import Hasura.Backends.DataConnector.Plan qualified as DC
import Hasura.Backends.DataConnector.Plan.Common qualified as DC
import Hasura.Backends.DataConnector.Plan.MutationPlan qualified as DC
import Hasura.Backends.DataConnector.Plan.QueryPlan qualified as DC
import Hasura.Base.Error (Code (..), QErr, throw400, throw400WithDetail, throw500)
import Hasura.EncJSON (EncJSON, encJFromBuilder, encJFromJValue)
import Hasura.GraphQL.Execute.Backend (BackendExecute (..), DBStepInfo (..), ExplainPlan (..))
@ -30,39 +34,59 @@ import Hasura.Tracing (MonadTrace)
import Hasura.Tracing qualified as Tracing
import Servant.Client.Core.HasClient ((//))
import Servant.Client.Generic (genericClient)
import Witch qualified
data DataConnectorPreparedQuery
= QueryRequest API.QueryRequest
| MutationRequest API.MutationRequest
encodePreparedQueryToJsonText :: DataConnectorPreparedQuery -> Text
encodePreparedQueryToJsonText = \case
QueryRequest req -> encodeToJsonText req
MutationRequest req -> encodeToJsonText req
encodeToJsonText :: J.ToJSON a => a -> Text
encodeToJsonText =
TE.decodeUtf8 . BL.toStrict . J.encode
--------------------------------------------------------------------------------
instance BackendExecute 'DataConnector where
type PreparedQuery 'DataConnector = API.QueryRequest
type PreparedQuery 'DataConnector = DataConnectorPreparedQuery
type MultiplexedQuery 'DataConnector = Void
type ExecutionMonad 'DataConnector = AgentClientT (Tracing.TraceT (ExceptT QErr IO))
mkDBQueryPlan UserInfo {..} env sourceName sourceConfig ir = do
queryPlan@DC.QueryPlan {..} <- DC.mkPlan _uiSession sourceConfig ir
queryPlan@DC.Plan {..} <- DC.mkQueryPlan _uiSession sourceConfig ir
transformedSourceConfig <- transformSourceConfig sourceConfig [("$session", J.toJSON _uiSession), ("$env", J.toJSON env)] env
pure
DBStepInfo
{ dbsiSourceName = sourceName,
dbsiSourceConfig = transformedSourceConfig,
dbsiPreparedQuery = Just _qpRequest,
dbsiPreparedQuery = Just $ QueryRequest _pRequest,
dbsiAction = buildQueryAction sourceName transformedSourceConfig queryPlan
}
mkDBQueryExplain fieldName UserInfo {..} sourceName sourceConfig ir = do
queryPlan@DC.QueryPlan {..} <- DC.mkPlan _uiSession sourceConfig ir
queryPlan@DC.Plan {..} <- DC.mkQueryPlan _uiSession sourceConfig ir
transformedSourceConfig <- transformSourceConfig sourceConfig [("$session", J.toJSON _uiSession), ("$env", J.object [])] Env.emptyEnvironment
pure $
mkAnyBackend @'DataConnector
DBStepInfo
{ dbsiSourceName = sourceName,
dbsiSourceConfig = transformedSourceConfig,
dbsiPreparedQuery = Just _qpRequest,
dbsiPreparedQuery = Just $ QueryRequest _pRequest,
dbsiAction = buildExplainAction fieldName sourceName transformedSourceConfig queryPlan
}
mkDBMutationPlan _ _ _ _ _ =
throw400 NotSupported "mkDBMutationPlan: not implemented for the Data Connector backend."
mkDBMutationPlan UserInfo {..} env _stringifyNum sourceName sourceConfig mutationDB = do
mutationPlan@DC.Plan {..} <- DC.mkMutationPlan _uiSession mutationDB
transformedSourceConfig <- transformSourceConfig sourceConfig [("$session", J.toJSON _uiSession), ("$env", J.toJSON env)] env
pure
DBStepInfo
{ dbsiSourceName = sourceName,
dbsiSourceConfig = transformedSourceConfig,
dbsiPreparedQuery = Just $ MutationRequest _pRequest,
dbsiAction = buildMutationAction sourceName transformedSourceConfig mutationPlan
}
mkLiveQuerySubscriptionPlan _ _ _ _ _ =
throw400 NotSupported "mkLiveQuerySubscriptionPlan: not implemented for the Data Connector backend."
mkDBStreamingSubscriptionPlan _ _ _ _ =
@@ -72,30 +96,28 @@ instance BackendExecute 'DataConnector where
mkSubscriptionExplain _ =
throw400 NotSupported "mkSubscriptionExplain: not implemented for the Data Connector backend."
buildQueryAction :: (MonadIO m, MonadTrace m, MonadError QErr m) => RQL.SourceName -> SourceConfig -> DC.QueryPlan -> AgentClientT m EncJSON
buildQueryAction sourceName SourceConfig {..} DC.QueryPlan {..} = do
buildQueryAction :: (MonadIO m, MonadTrace m, MonadError QErr m) => RQL.SourceName -> SourceConfig -> DC.Plan API.QueryRequest API.QueryResponse -> AgentClientT m EncJSON
buildQueryAction sourceName SourceConfig {..} DC.Plan {..} = do
-- NOTE: Should this check occur during query construction in 'mkPlan'?
when (DC.queryHasRelations _qpRequest && isNothing (API._cRelationships _scCapabilities)) $
when (DC.queryHasRelations _pRequest && isNothing (API._cRelationships _scCapabilities)) $
throw400 NotSupported "Agents must provide their own dataloader."
let apiQueryRequest = Witch.into @API.QueryRequest _qpRequest
queryResponse <- queryGuard =<< (genericClient // API._query) (toTxt sourceName) _scConfig apiQueryRequest
reshapedResponse <- _qpResponseReshaper queryResponse
queryResponse <- queryGuard =<< (genericClient // API._query) (toTxt sourceName) _scConfig _pRequest
reshapedResponse <- _pResponseReshaper queryResponse
pure . encJFromBuilder $ J.fromEncoding reshapedResponse
where
errorAction e = throw400WithDetail DataConnectorError (errorResponseSummary e) (_crDetails e)
defaultAction = throw400 DataConnectorError "Unexpected data connector capabilities response - Unexpected Type"
queryGuard = queryCase defaultAction pure errorAction
errorAction e = throw400WithDetail DataConnectorError (API.errorResponseSummary e) (_crDetails e)
defaultAction = throw400 DataConnectorError "Unexpected data connector query response - Unexpected Type"
queryGuard = API.queryCase defaultAction pure errorAction
-- Delegates the generation to the Agent's /explain endpoint if it has that capability,
-- otherwise, returns the IR sent to the agent.
buildExplainAction :: (MonadIO m, MonadTrace m, MonadError QErr m) => GQL.RootFieldAlias -> RQL.SourceName -> SourceConfig -> DC.QueryPlan -> AgentClientT m EncJSON
buildExplainAction fieldName sourceName SourceConfig {..} DC.QueryPlan {..} =
buildExplainAction :: (MonadIO m, MonadTrace m, MonadError QErr m) => GQL.RootFieldAlias -> RQL.SourceName -> SourceConfig -> DC.Plan API.QueryRequest API.QueryResponse -> AgentClientT m EncJSON
buildExplainAction fieldName sourceName SourceConfig {..} DC.Plan {..} =
case API._cExplain _scCapabilities of
Nothing -> pure . encJFromJValue . toExplainPlan fieldName $ _qpRequest
Nothing -> pure . encJFromJValue . toExplainPlan fieldName $ _pRequest
Just API.ExplainCapabilities -> do
let apiQueryRequest = Witch.into @API.QueryRequest _qpRequest
explainResponse <- (genericClient // API._explain) (toTxt sourceName) _scConfig apiQueryRequest
explainResponse <- (genericClient // API._explain) (toTxt sourceName) _scConfig _pRequest
pure . encJFromJValue $
ExplainPlan
fieldName
@@ -104,4 +126,14 @@ buildExplainAction fieldName sourceName SourceConfig {..} DC.QueryPlan {..} =
toExplainPlan :: GQL.RootFieldAlias -> API.QueryRequest -> ExplainPlan
toExplainPlan fieldName queryRequest =
ExplainPlan fieldName (Just "") (Just [DC.renderQuery $ queryRequest])
ExplainPlan fieldName (Just "") (Just [encodeToJsonText queryRequest])
buildMutationAction :: (MonadIO m, MonadTrace m, MonadError QErr m) => RQL.SourceName -> SourceConfig -> DC.Plan API.MutationRequest API.MutationResponse -> AgentClientT m EncJSON
buildMutationAction sourceName SourceConfig {..} DC.Plan {..} = do
queryResponse <- mutationGuard =<< (genericClient // API._mutation) (toTxt sourceName) _scConfig _pRequest
reshapedResponse <- _pResponseReshaper queryResponse
pure . encJFromBuilder $ J.fromEncoding reshapedResponse
where
errorAction e = throw400WithDetail DataConnectorError (API.errorResponseSummary e) (_crDetails e)
defaultAction = throw400 DataConnectorError "Unexpected data connector mutations response - Unexpected Type"
mutationGuard = API.mutationCase defaultAction pure errorAction

View File

@@ -7,12 +7,10 @@ module Hasura.Backends.DataConnector.Adapter.Transport () where
import Control.Exception.Safe (throwIO)
import Data.Aeson qualified as J
import Data.Text.Extended ((<>>))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Execute ()
import Hasura.Backends.DataConnector.Adapter.Execute (DataConnectorPreparedQuery (..), encodePreparedQueryToJsonText)
import Hasura.Backends.DataConnector.Adapter.Types (SourceConfig (..))
import Hasura.Backends.DataConnector.Agent.Client (AgentClientContext (..), AgentClientT, runAgentClientT)
import Hasura.Backends.DataConnector.Plan qualified as DC
import Hasura.Base.Error (Code (NotSupported), QErr, throw400)
import Hasura.Base.Error (QErr)
import Hasura.EncJSON (EncJSON)
import Hasura.GraphQL.Execute.Backend (DBStepInfo (..))
import Hasura.GraphQL.Logging qualified as HGL
@@ -31,8 +29,7 @@ import Hasura.Tracing qualified as Tracing
instance BackendTransport 'DataConnector where
runDBQuery = runDBQuery'
runDBQueryExplain = runDBQueryExplain'
runDBMutation _ _ _ _ _ _ _ _ =
throw400 NotSupported "runDBMutation: not implemented for the Data Connector backend."
runDBMutation = runDBMutation'
runDBStreamingSubscription _ _ _ =
liftIO . throwIO $ userError "runDBStreamingSubscription: not implemented for the Data Connector backend."
runDBSubscription _ _ _ =
@@ -51,7 +48,7 @@ runDBQuery' ::
Logger Hasura ->
SourceConfig ->
AgentClientT (Tracing.TraceT (ExceptT QErr IO)) a ->
Maybe API.QueryRequest ->
Maybe DataConnectorPreparedQuery ->
m (DiffTime, a)
runDBQuery' requestId query fieldName _userInfo logger SourceConfig {..} action queryRequest = do
void $ HGL.logQueryLog logger $ mkQueryLog query fieldName queryRequest requestId
@@ -64,13 +61,13 @@ runDBQuery' requestId query fieldName _userInfo logger SourceConfig {..} action
mkQueryLog ::
GQLReqUnparsed ->
RootFieldAlias ->
Maybe API.QueryRequest ->
Maybe DataConnectorPreparedQuery ->
RequestId ->
HGL.QueryLog
mkQueryLog gqlQuery fieldName maybeQuery requestId =
HGL.QueryLog
gqlQuery
((\query -> (fieldName, HGL.GeneratedQuery (DC.renderQuery query) J.Null)) <$> maybeQuery)
((\query -> (fieldName, HGL.GeneratedQuery (encodePreparedQueryToJsonText query) J.Null)) <$> maybeQuery)
requestId
HGL.QueryLogKindDatabase
@@ -85,3 +82,26 @@ runDBQueryExplain' (DBStepInfo _ SourceConfig {..} _ action) =
. Tracing.ignoreTraceT
. flip runAgentClientT (AgentClientContext nullLogger _scEndpoint _scManager _scTimeoutMicroseconds)
$ action
runDBMutation' ::
( MonadIO m,
MonadError QErr m,
Tracing.MonadTrace m,
HGL.MonadQueryLog m
) =>
RequestId ->
GQLReqUnparsed ->
RootFieldAlias ->
UserInfo ->
Logger Hasura ->
SourceConfig ->
AgentClientT (Tracing.TraceT (ExceptT QErr IO)) a ->
Maybe DataConnectorPreparedQuery ->
m (DiffTime, a)
runDBMutation' requestId query fieldName _userInfo logger SourceConfig {..} action queryRequest = do
void $ HGL.logQueryLog logger $ mkQueryLog query fieldName queryRequest requestId
withElapsedTime
. Tracing.trace ("Data Connector backend mutation for root field " <>> fieldName)
. Tracing.interpTraceT (liftEitherM . liftIO . runExceptT)
. flip runAgentClientT (AgentClientContext logger _scEndpoint _scManager _scTimeoutMicroseconds)
$ action

View File

@@ -1,760 +0,0 @@
module Hasura.Backends.DataConnector.Plan
( QueryPlan (..),
mkPlan,
renderQuery,
queryHasRelations,
)
where
--------------------------------------------------------------------------------
import Control.Monad.Trans.Writer.CPS qualified as CPS
import Data.Aeson qualified as J
import Data.Aeson.Encoding qualified as JE
import Data.Aeson.Types qualified as J
import Data.Bifunctor (Bifunctor (bimap))
import Data.ByteString qualified as BS
import Data.ByteString.Lazy qualified as BL
import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty qualified as NE
import Data.Semigroup (Min (..))
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE
import Data.Text.Extended (toTxt, (<<>), (<>>))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Backend
import Hasura.Backends.DataConnector.Adapter.Types
import Hasura.Base.Error
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.OrderBy
import Hasura.RQL.IR.Select
import Hasura.RQL.IR.Value
import Hasura.RQL.Types.Backend (SessionVarType)
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Relationships.Local (RelInfo (..))
import Hasura.SQL.Backend
import Hasura.SQL.Types (CollectableType (..))
import Hasura.Session
import Language.GraphQL.Draft.Syntax qualified as G
import Witch qualified
--------------------------------------------------------------------------------
-- | A fully-translated query: the 'API.QueryRequest' to send to the data
-- connector agent, paired with a reshaping function that converts the agent's
-- 'API.QueryResponse' back into the JSON encoding the GraphQL client expects
-- (e.g. re-adding literal text fields that were never sent to the agent).
data QueryPlan = QueryPlan
  { _qpRequest :: API.QueryRequest,
    _qpResponseReshaper :: forall m. (MonadError QErr m) => API.QueryResponse -> m J.Encoding
  }
-- | The plain fields and the aggregates requested from a table, kept separate
-- so each can be placed into the matching part of an 'API.Query'.
data FieldsAndAggregates = FieldsAndAggregates
  { _faaFields :: HashMap FieldName API.Field,
    _faaAggregates :: HashMap FieldName API.Aggregate
  }
  deriving stock (Show, Eq)

-- Combining merges the field maps and the aggregate maps pointwise.
instance Semigroup FieldsAndAggregates where
  left <> right =
    FieldsAndAggregates
      { _faaFields = _faaFields left <> _faaFields right,
        _faaAggregates = _faaAggregates left <> _faaAggregates right
      }

instance Monoid FieldsAndAggregates where
  mempty = FieldsAndAggregates HashMap.empty HashMap.empty
-- | An optional prefix for field names. Prefixes combine left-to-right,
-- joined by an underscore; an absent prefix is the identity.
newtype FieldPrefix = FieldPrefix (Maybe FieldName)
  deriving stock (Show, Eq)

instance Semigroup FieldPrefix where
  FieldPrefix l <> FieldPrefix r =
    case (l, r) of
      (Just l', Just r') -> FieldPrefix . Just $ l' <> "_" <> r'
      (Just l', Nothing) -> FieldPrefix (Just l')
      (Nothing, r') -> FieldPrefix r'

instance Monoid FieldPrefix where
  mempty = FieldPrefix Nothing

-- | The identity prefix: leaves field names untouched.
noPrefix :: FieldPrefix
noPrefix = FieldPrefix Nothing

-- | Build a prefix from a field name.
prefixWith :: FieldName -> FieldPrefix
prefixWith = FieldPrefix . Just

-- | Apply a prefix to a field name, inserting an underscore between them.
applyPrefix :: FieldPrefix -> FieldName -> FieldName
applyPrefix (FieldPrefix fieldNamePrefix) fieldName =
  case fieldNamePrefix of
    Nothing -> fieldName
    Just prefix -> prefix <> "_" <> fieldName
-- | All relationships discovered during translation, keyed first by source
-- table and then by relationship name.
newtype TableRelationships = TableRelationships
  {unTableRelationships :: HashMap API.TableName (HashMap API.RelationshipName API.Relationship)}
  deriving stock (Eq, Show)

-- Merging takes the union per source table, unioning the inner
-- relationship maps when both sides mention the same table.
instance Semigroup TableRelationships where
  TableRelationships l <> TableRelationships r =
    TableRelationships (HashMap.unionWith HashMap.union l r)

instance Monoid TableRelationships where
  mempty = TableRelationships HashMap.empty
-- | Serialise an 'API.QueryRequest' to its JSON representation as 'Text'.
--
-- NOTE: This is for logging and debug purposes only.
renderQuery :: API.QueryRequest -> Text
renderQuery request =
  TE.decodeUtf8 (BL.toStrict (J.encode request))
-- | Map a 'QueryDB 'DataConnector' term into a 'Plan'
mkPlan ::
forall m.
MonadError QErr m =>
SessionVariables ->
SourceConfig ->
QueryDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
m QueryPlan
mkPlan session (SourceConfig {}) ir = do
queryRequest <- translateQueryDB ir
pure $ QueryPlan queryRequest (reshapeResponseToQueryShape ir)
where
translateQueryDB ::
QueryDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
m API.QueryRequest
translateQueryDB =
\case
QDBMultipleRows annSelect -> translateAnnSelectToQueryRequest (translateAnnFields noPrefix) annSelect
QDBSingleRow annSelect -> translateAnnSelectToQueryRequest (translateAnnFields noPrefix) annSelect
QDBAggregation annSelect -> translateAnnSelectToQueryRequest translateTableAggregateFields annSelect
translateAnnSelectToQueryRequest ::
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
m API.QueryRequest
translateAnnSelectToQueryRequest translateFieldsAndAggregates selectG = do
tableName <- extractTableName selectG
(query, (TableRelationships tableRelationships)) <- CPS.runWriterT (translateAnnSelect translateFieldsAndAggregates tableName selectG)
let apiTableRelationships = uncurry API.TableRelationships <$> HashMap.toList tableRelationships
pure $
API.QueryRequest
{ _qrTable = tableName,
_qrTableRelationships = apiTableRelationships,
_qrQuery = query
}
extractTableName :: AnnSelectG 'DataConnector fieldsType valueType -> m API.TableName
extractTableName selectG =
case _asnFrom selectG of
FromTable tn -> pure $ Witch.from tn
FromIdentifier _ -> throw400 NotSupported "AnnSelectG: FromIdentifier not supported"
FromFunction {} -> throw400 NotSupported "AnnSelectG: FromFunction not supported"
recordTableRelationship :: API.TableName -> API.RelationshipName -> API.Relationship -> CPS.WriterT TableRelationships m ()
recordTableRelationship sourceTableName relationshipName relationship =
CPS.tell . TableRelationships $ HashMap.singleton sourceTableName (HashMap.singleton relationshipName relationship)
translateAnnSelect ::
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
API.TableName ->
AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m API.Query
translateAnnSelect translateFieldsAndAggregates tableName selectG = do
FieldsAndAggregates {..} <- translateFieldsAndAggregates tableName (_asnFields selectG)
let whereClauseWithPermissions =
case _saWhere (_asnArgs selectG) of
Just expr -> BoolAnd [expr, _tpFilter (_asnPerm selectG)]
Nothing -> _tpFilter (_asnPerm selectG)
whereClause <- translateBoolExpToExpression tableName whereClauseWithPermissions
orderBy <- traverse (translateOrderBy tableName) (_saOrderBy $ _asnArgs selectG)
pure
API.Query
{ _qFields = mapFieldNameHashMap _faaFields,
_qAggregates = mapFieldNameHashMap _faaAggregates,
_qLimit =
fmap getMin $
foldMap
(fmap Min)
[ _saLimit (_asnArgs selectG),
_tpLimit (_asnPerm selectG)
],
_qOffset = fmap fromIntegral (_saOffset (_asnArgs selectG)),
_qWhere = whereClause,
_qOrderBy = orderBy
}
translateOrderBy ::
API.TableName ->
NE.NonEmpty (AnnotatedOrderByItemG 'DataConnector (UnpreparedValue 'DataConnector)) ->
CPS.WriterT TableRelationships m API.OrderBy
translateOrderBy sourceTableName orderByItems = do
orderByElementsAndRelations <- for orderByItems \OrderByItemG {..} -> do
let orderDirection = maybe API.Ascending Witch.from obiType
translateOrderByElement sourceTableName orderDirection [] obiColumn
relations <- lift . mergeOrderByRelations $ snd <$> orderByElementsAndRelations
pure
API.OrderBy
{ _obRelations = relations,
_obElements = fst <$> orderByElementsAndRelations
}
translateOrderByElement ::
API.TableName ->
API.OrderDirection ->
[API.RelationshipName] ->
AnnotatedOrderByElement 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m (API.OrderByElement, HashMap API.RelationshipName API.OrderByRelation)
translateOrderByElement sourceTableName orderDirection targetReversePath = \case
AOCColumn (ColumnInfo {..}) ->
pure
( API.OrderByElement
{ _obeTargetPath = reverse targetReversePath,
_obeTarget = API.OrderByColumn $ Witch.from ciColumn,
_obeOrderDirection = orderDirection
},
mempty
)
AOCObjectRelation relationshipInfo filterExp orderByElement -> do
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
(translatedOrderByElement, subOrderByRelations) <- translateOrderByElement _rTargetTable orderDirection (relationshipName : targetReversePath) orderByElement
targetTableWhereExp <- translateBoolExpToExpression _rTargetTable filterExp
let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp subOrderByRelations)]
pure (translatedOrderByElement, orderByRelations)
AOCArrayAggregation relationshipInfo filterExp aggregateOrderByElement -> do
(relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
orderByTarget <- case aggregateOrderByElement of
AAOCount ->
pure API.OrderByStarCountAggregate
AAOOp aggFunctionTxt ColumnInfo {..} -> do
aggFunction <- lift $ translateSingleColumnAggregateFunction aggFunctionTxt
pure . API.OrderBySingleColumnAggregate $ API.SingleColumnAggregate aggFunction $ Witch.from ciColumn
let translatedOrderByElement =
API.OrderByElement
{ _obeTargetPath = reverse (relationshipName : targetReversePath),
_obeTarget = orderByTarget,
_obeOrderDirection = orderDirection
}
targetTableWhereExp <- translateBoolExpToExpression _rTargetTable filterExp
let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp mempty)]
pure (translatedOrderByElement, orderByRelations)
mergeOrderByRelations ::
Foldable f =>
f (HashMap API.RelationshipName API.OrderByRelation) ->
m (HashMap API.RelationshipName API.OrderByRelation)
mergeOrderByRelations orderByRelationsList =
foldM mergeMap mempty orderByRelationsList
where
mergeMap :: HashMap API.RelationshipName API.OrderByRelation -> HashMap API.RelationshipName API.OrderByRelation -> m (HashMap API.RelationshipName API.OrderByRelation)
mergeMap left right = foldM (\targetMap (relName, orderByRel) -> HashMap.alterF (maybe (pure $ Just orderByRel) (fmap Just . mergeOrderByRelation orderByRel)) relName targetMap) left $ HashMap.toList right
mergeOrderByRelation :: API.OrderByRelation -> API.OrderByRelation -> m API.OrderByRelation
mergeOrderByRelation right left =
if API._obrWhere left == API._obrWhere right
then do
mergedSubrelations <- mergeMap (API._obrSubrelations left) (API._obrSubrelations right)
pure $ API.OrderByRelation (API._obrWhere left) mergedSubrelations
else throw500 "mergeOrderByRelations: Differing filter expressions found for the same table"
recordTableRelationshipFromRelInfo ::
API.TableName ->
RelInfo 'DataConnector ->
CPS.WriterT TableRelationships m (API.RelationshipName, API.Relationship)
recordTableRelationshipFromRelInfo sourceTableName RelInfo {..} = do
let relationshipName = mkRelationshipName riName
let relationshipType = case riType of
ObjRel -> API.ObjectRelationship
ArrRel -> API.ArrayRelationship
let relationship =
API.Relationship
{ _rTargetTable = Witch.from riRTable,
_rRelationshipType = relationshipType,
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList riMapping
}
recordTableRelationship
sourceTableName
relationshipName
relationship
pure (relationshipName, relationship)
translateAnnFields ::
FieldPrefix ->
API.TableName ->
AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m FieldsAndAggregates
translateAnnFields fieldNamePrefix sourceTableName fields = do
translatedFields <- traverse (traverse (translateAnnField sourceTableName)) fields
let translatedFields' = HashMap.fromList (mapMaybe (\(fieldName, field) -> (applyPrefix fieldNamePrefix fieldName,) <$> field) translatedFields)
pure $
FieldsAndAggregates
translatedFields'
mempty
translateAnnField ::
API.TableName ->
AnnFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m (Maybe API.Field)
translateAnnField sourceTableName = \case
AFColumn colField ->
-- TODO: make sure certain fields in colField are not in use, since we don't
-- support them
pure . Just $ API.ColumnField (Witch.from $ _acfColumn colField) (Witch.from . columnTypeToScalarType $ _acfType colField)
AFObjectRelation objRel -> do
let targetTable = Witch.from $ _aosTableFrom (_aarAnnSelect objRel)
let relationshipName = mkRelationshipName $ _aarRelationshipName objRel
FieldsAndAggregates {..} <- translateAnnFields noPrefix targetTable (_aosFields (_aarAnnSelect objRel))
whereClause <- translateBoolExpToExpression targetTable (_aosTableFilter (_aarAnnSelect objRel))
recordTableRelationship
sourceTableName
relationshipName
API.Relationship
{ _rTargetTable = targetTable,
_rRelationshipType = API.ObjectRelationship,
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping objRel)
}
pure . Just . API.RelField $
API.RelationshipField
relationshipName
( API.Query
{ _qFields = mapFieldNameHashMap _faaFields,
_qAggregates = mapFieldNameHashMap _faaAggregates,
_qWhere = whereClause,
_qLimit = Nothing,
_qOffset = Nothing,
_qOrderBy = Nothing
}
)
AFArrayRelation (ASSimple arrayRelationSelect) -> do
Just <$> translateArrayRelationSelect sourceTableName (translateAnnFields noPrefix) arrayRelationSelect
AFArrayRelation (ASAggregate arrayRelationSelect) ->
Just <$> translateArrayRelationSelect sourceTableName translateTableAggregateFields arrayRelationSelect
AFExpression _literal ->
-- We ignore literal text fields (we don't send them to the data connector agent)
-- and add them back to the response JSON when we reshape what the agent returns
-- to us
pure Nothing
translateArrayRelationSelect ::
API.TableName ->
(API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
AnnRelationSelectG 'DataConnector (AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector)) ->
CPS.WriterT TableRelationships m API.Field
translateArrayRelationSelect sourceTableName translateFieldsAndAggregates arrRel = do
targetTable <- lift $ extractTableName (_aarAnnSelect arrRel)
query <- translateAnnSelect translateFieldsAndAggregates targetTable (_aarAnnSelect arrRel)
let relationshipName = mkRelationshipName $ _aarRelationshipName arrRel
recordTableRelationship
sourceTableName
relationshipName
API.Relationship
{ _rTargetTable = targetTable,
_rRelationshipType = API.ArrayRelationship,
_rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping arrRel)
}
pure . API.RelField $
API.RelationshipField
relationshipName
query
translateTableAggregateFields ::
API.TableName ->
TableAggregateFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m FieldsAndAggregates
translateTableAggregateFields sourceTableName fields = do
mconcat <$> traverse (uncurry (translateTableAggregateField sourceTableName)) fields
translateTableAggregateField ::
API.TableName ->
FieldName ->
TableAggregateFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m FieldsAndAggregates
translateTableAggregateField sourceTableName fieldName = \case
TAFAgg aggregateFields -> do
let fieldNamePrefix = prefixWith fieldName
translatedAggregateFields <- lift $ mconcat <$> traverse (uncurry (translateAggregateField fieldNamePrefix)) aggregateFields
pure $
FieldsAndAggregates
mempty
translatedAggregateFields
TAFNodes _ fields ->
translateAnnFields (prefixWith fieldName) sourceTableName fields
TAFExp _txt ->
-- We ignore literal text fields (we don't send them to the data connector agent)
-- and add them back to the response JSON when we reshape what the agent returns
-- to us
pure mempty
translateAggregateField ::
FieldPrefix ->
FieldName ->
AggregateField 'DataConnector ->
m (HashMap FieldName API.Aggregate)
translateAggregateField fieldPrefix fieldName = \case
AFCount countAggregate ->
let aggregate =
case countAggregate of
StarCount -> API.StarCount
ColumnCount column -> API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = False}
ColumnDistinctCount column -> API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = True}
in pure $ HashMap.singleton (applyPrefix fieldPrefix fieldName) aggregate
AFOp AggregateOp {..} -> do
let fieldPrefix' = fieldPrefix <> prefixWith fieldName
aggFunction <- translateSingleColumnAggregateFunction _aoOp
fmap (HashMap.fromList . catMaybes) . forM _aoFields $ \(columnFieldName, columnField) ->
case columnField of
CFCol column _columnType ->
pure . Just $ (applyPrefix fieldPrefix' columnFieldName, API.SingleColumn . API.SingleColumnAggregate aggFunction $ Witch.from column)
CFExp _txt ->
-- We ignore literal text fields (we don't send them to the data connector agent)
-- and add them back to the response JSON when we reshape what the agent returns
-- to us
pure Nothing
AFExp _txt ->
-- We ignore literal text fields (we don't send them to the data connector agent)
-- and add them back to the response JSON when we reshape what the agent returns
-- to us
pure mempty
translateSingleColumnAggregateFunction :: Text -> m API.SingleColumnAggregateFunction
translateSingleColumnAggregateFunction functionName =
fmap API.SingleColumnAggregateFunction (G.mkName functionName)
`onNothing` throw500 ("translateSingleColumnAggregateFunction: Invalid aggregate function encountered: " <> functionName)
prepareLiterals ::
UnpreparedValue 'DataConnector ->
m Literal
prepareLiterals (UVLiteral literal) = pure $ literal
prepareLiterals (UVParameter _ e) = pure (ValueLiteral (columnTypeToScalarType $ cvType e) (cvValue e))
prepareLiterals UVSession = throw400 NotSupported "prepareLiterals: UVSession"
prepareLiterals (UVSessionVar sessionVarType sessionVar) = do
textValue <-
getSessionVariableValue sessionVar session
`onNothing` throw400 NotSupported ("prepareLiterals: session var not found: " <>> sessionVar)
parseSessionVariable sessionVar sessionVarType textValue
parseSessionVariable :: SessionVariable -> SessionVarType 'DataConnector -> Text -> m Literal
parseSessionVariable varName varType varValue = do
case varType of
CollectableTypeScalar scalarType@(ScalarType customTypeName _) ->
parseCustomValue scalarType (customTypeName <> " JSON value")
CollectableTypeArray scalarType@(ScalarType customTypeName _) ->
parseCustomArray scalarType ("JSON array of " <> customTypeName <> " JSON values")
where
parseCustomValue :: ScalarType -> Text -> m Literal
parseCustomValue scalarType description =
case scalarType of
ScalarType _ (Just GraphQLString) ->
-- Special case for string: uses literal session variable value rather than trying to parse a JSON string
pure . ValueLiteral scalarType $ J.String varValue
_ ->
parseValue' (parseValue scalarType) (ValueLiteral scalarType) description
parseCustomArray :: ScalarType -> Text -> m Literal
parseCustomArray scalarType =
parseValue' parser (ArrayLiteral scalarType)
where
parser :: (J.Value -> J.Parser [J.Value])
parser = J.withArray "array of JSON values" (fmap toList . traverse (parseValue scalarType))
parseValue' :: (J.Value -> J.Parser a) -> (a -> Literal) -> Text -> m Literal
parseValue' parser toLiteral description =
toLiteral
<$> (J.eitherDecodeStrict' valValueBS >>= J.parseEither parser)
`onLeft` (\err -> throw400 ParseFailed ("Expected " <> description <> " for session variable " <> varName <<> ". " <> T.pack err))
valValueBS :: BS.ByteString
valValueBS = TE.encodeUtf8 varValue
translateBoolExpToExpression ::
API.TableName ->
AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
CPS.WriterT TableRelationships m (Maybe API.Expression)
translateBoolExpToExpression sourceTableName boolExp = do
removeAlwaysTrueExpression <$> translateBoolExp sourceTableName boolExp
-- | Translates an 'AnnBoolExp' into an 'API.Expression', recording any table
-- relationships encountered along the way into the 'CPS.WriterT' output so they
-- can be included in the final request sent to the agent.
translateBoolExp ::
  API.TableName ->
  AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
  CPS.WriterT TableRelationships m API.Expression
translateBoolExp sourceTableName = \case
  BoolAnd xs ->
    mkIfZeroOrMany API.And . mapMaybe removeAlwaysTrueExpression <$> traverse (translateBoolExp sourceTableName) xs
  BoolOr xs ->
    mkIfZeroOrMany API.Or . mapMaybe removeAlwaysFalseExpression <$> traverse (translateBoolExp sourceTableName) xs
  BoolNot x ->
    API.Not <$> (translateBoolExp sourceTableName) x
  BoolField (AVColumn c xs) ->
    -- Multiple operators applied to a single column are implicitly ANDed together
    lift $ mkIfZeroOrMany API.And <$> traverse (translateOp (Witch.from $ ciColumn c) (Witch.from . columnTypeToScalarType $ ciType c)) xs
  BoolField (AVRelationship relationshipInfo boolExp) -> do
    (relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
    API.Exists (API.RelatedTable relationshipName) <$> translateBoolExp _rTargetTable boolExp
  BoolExists GExists {..} ->
    let tableName = Witch.from _geTable
     in API.Exists (API.UnrelatedTable tableName) <$> translateBoolExp tableName _geWhere
  where
    -- Makes an 'API.Expression' like 'API.And' if there is zero or many input expressions otherwise
    -- just returns the singleton expression. This helps remove redundant 'API.And' etcs from the expression.
    mkIfZeroOrMany :: ([API.Expression] -> API.Expression) -> [API.Expression] -> API.Expression
    mkIfZeroOrMany mk = \case
      [singleExp] -> singleExp
      zeroOrManyExps -> mk zeroOrManyExps

    -- 'API.And []' and 'API.Not (API.Or [])' are both trivially true, so can be dropped
    removeAlwaysTrueExpression :: API.Expression -> Maybe API.Expression
    removeAlwaysTrueExpression = \case
      API.And [] -> Nothing
      API.Not (API.Or []) -> Nothing
      other -> Just other

    -- 'API.Or []' and 'API.Not (API.And [])' are both trivially false, so can be dropped
    removeAlwaysFalseExpression :: API.Expression -> Maybe API.Expression
    removeAlwaysFalseExpression = \case
      API.Or [] -> Nothing
      API.Not (API.And []) -> Nothing
      other -> Just other
-- | Translates a single 'OpExpG' comparison operator applied to a column into an
-- 'API.Expression'. Array literals are only accepted by the IN/NOT IN operators and
-- by custom binary array operators; the ordinary binary comparison operators reject
-- them with a 400 error. LIKE-style operators and casts are not supported by this backend.
translateOp ::
  API.ColumnName ->
  API.ScalarType ->
  OpExpG 'DataConnector (UnpreparedValue 'DataConnector) ->
  m API.Expression
translateOp columnName columnType opExp = do
  -- Resolve session variables/parameters in the operator's comparison value first
  preparedOpExp <- traverse prepareLiterals $ opExp
  case preparedOpExp of
    AEQ _ (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.Equal value scalarType
    AEQ _ (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for AEQ operator"
    ANE _ (ValueLiteral scalarType value) ->
      pure . API.Not $ mkApplyBinaryComparisonOperatorToScalar API.Equal value scalarType
    ANE _ (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for ANE operator"
    AGT (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.GreaterThan value scalarType
    AGT (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for AGT operator"
    ALT (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.LessThan value scalarType
    ALT (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for ALT operator"
    AGTE (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.GreaterThanOrEqual value scalarType
    AGTE (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for AGTE operator"
    ALTE (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.LessThanOrEqual value scalarType
    ALTE (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for ALTE operator"
    ANISNULL ->
      pure $ API.ApplyUnaryComparisonOperator API.IsNull currentComparisonColumn
    ANISNOTNULL ->
      pure $ API.Not (API.ApplyUnaryComparisonOperator API.IsNull currentComparisonColumn)
    AIN literal -> pure $ inOperator literal
    ANIN literal -> pure . API.Not $ inOperator literal
    CEQ rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.Equal rootOrCurrentColumn
    CNE rootOrCurrentColumn ->
      pure $ API.Not $ mkApplyBinaryComparisonOperatorToAnotherColumn API.Equal rootOrCurrentColumn
    CGT rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.GreaterThan rootOrCurrentColumn
    CLT rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.LessThan rootOrCurrentColumn
    CGTE rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.GreaterThanOrEqual rootOrCurrentColumn
    CLTE rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.LessThanOrEqual rootOrCurrentColumn
    ALIKE _literal ->
      throw400 NotSupported "The ALIKE operator is not supported by the Data Connector backend"
    ANLIKE _literal ->
      throw400 NotSupported "The ANLIKE operator is not supported by the Data Connector backend"
    ACast _literal ->
      throw400 NotSupported "The ACast operator is not supported by the Data Connector backend"
    ABackendSpecific CustomBooleanOperator {..} -> case _cboRHS of
      Nothing -> pure $ API.ApplyUnaryComparisonOperator (API.CustomUnaryComparisonOperator _cboName) currentComparisonColumn
      Just (Left rootOrCurrentColumn) ->
        pure $ mkApplyBinaryComparisonOperatorToAnotherColumn (API.CustomBinaryComparisonOperator _cboName) rootOrCurrentColumn
      Just (Right (ValueLiteral scalarType value)) ->
        pure $ mkApplyBinaryComparisonOperatorToScalar (API.CustomBinaryComparisonOperator _cboName) value scalarType
      Just (Right (ArrayLiteral scalarType array)) ->
        pure $ API.ApplyBinaryArrayComparisonOperator (API.CustomBinaryArrayComparisonOperator _cboName) currentComparisonColumn array (Witch.from scalarType)
  where
    -- The column this operator is being applied to, on the current table
    currentComparisonColumn :: API.ComparisonColumn
    currentComparisonColumn = API.ComparisonColumn API.CurrentTable columnName columnType

    -- Compares the current column against another column on either the query (root) table
    -- or the current table. NOTE(review): the other column reuses 'columnType' — this
    -- assumes both columns share a scalar type; confirm against callers.
    mkApplyBinaryComparisonOperatorToAnotherColumn :: API.BinaryComparisonOperator -> RootOrCurrentColumn 'DataConnector -> API.Expression
    mkApplyBinaryComparisonOperatorToAnotherColumn operator (RootOrCurrentColumn rootOrCurrent otherColumnName) =
      let columnPath = case rootOrCurrent of
            IsRoot -> API.QueryTable
            IsCurrent -> API.CurrentTable
       in API.ApplyBinaryComparisonOperator operator currentComparisonColumn (API.AnotherColumn $ API.ComparisonColumn columnPath (Witch.from otherColumnName) columnType)

    -- A single value literal is treated as a singleton array for the IN operator
    inOperator :: Literal -> API.Expression
    inOperator literal =
      let (values, scalarType) = case literal of
            ArrayLiteral scalarType' array -> (array, scalarType')
            ValueLiteral scalarType' value -> ([value], scalarType')
       in API.ApplyBinaryArrayComparisonOperator API.In currentComparisonColumn values (Witch.from scalarType)

    mkApplyBinaryComparisonOperatorToScalar :: API.BinaryComparisonOperator -> J.Value -> ScalarType -> API.Expression
    mkApplyBinaryComparisonOperatorToScalar operator value scalarType =
      API.ApplyBinaryComparisonOperator operator currentComparisonColumn (API.ScalarValue value (Witch.from scalarType))
-- | Validate if a 'API.QueryRequest' contains any relationships.
queryHasRelations :: API.QueryRequest -> Bool
queryHasRelations API.QueryRequest {..} = _qrTableRelationships /= mempty

-- | Whether a query is expected to return at most one row or many rows
data Cardinality
  = Single
  | Many
-- | Reshapes a 'API.QueryResponse' into the JSON encoding expected by the GraphQL
-- endpoint for the given query, dispatching on the query's kind/cardinality.
reshapeResponseToQueryShape ::
  MonadError QErr m =>
  QueryDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  API.QueryResponse ->
  m J.Encoding
reshapeResponseToQueryShape queryDb response =
  case queryDb of
    QDBMultipleRows simpleSelect -> reshapeSimpleSelectRows Many (_asnFields simpleSelect) response
    QDBSingleRow simpleSelect -> reshapeSimpleSelectRows Single (_asnFields simpleSelect) response
    QDBAggregation aggregateSelect -> reshapeTableAggregateFields (_asnFields aggregateSelect) response
-- | Reshapes the rows of a 'API.QueryResponse' into the JSON shape of a simple select.
-- A 'Single' cardinality select encodes zero rows as JSON null and fails (500) if the
-- agent returns more than one row; 'Many' encodes all rows as a JSON array.
reshapeSimpleSelectRows ::
  MonadError QErr m =>
  Cardinality ->
  AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  API.QueryResponse ->
  m J.Encoding
reshapeSimpleSelectRows cardinality fields API.QueryResponse {..} =
  case cardinality of
    Single ->
      case rows of
        [] -> pure $ J.toEncoding J.Null
        [singleRow] -> reshapeAnnFields noPrefix fields singleRow
        _multipleRows ->
          throw500 "Data Connector agent returned multiple rows when only one was expected" -- TODO(dchambers): Add pathing information for error clarity
    Many -> do
      reshapedRows <- traverse (reshapeAnnFields noPrefix fields) rows
      pure $ JE.list id reshapedRows
  where
    -- An absent rows property is treated the same as an empty list of rows
    rows = fromMaybe mempty _qrRows
-- | Reshapes a 'API.QueryResponse' into the JSON shape of a table aggregate query,
-- handling aggregate fields, nodes fields (row data), and literal expression fields.
reshapeTableAggregateFields ::
  MonadError QErr m =>
  TableAggregateFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  API.QueryResponse ->
  m J.Encoding
reshapeTableAggregateFields tableAggregateFields API.QueryResponse {..} = do
  reshapedFields <- forM tableAggregateFields $ \(fieldName@(FieldName fieldNameText), tableAggregateField) -> do
    case tableAggregateField of
      TAFAgg aggregateFields -> do
        -- Aggregate fields were requested with the GraphQL field name as a prefix
        reshapedAggregateFields <- reshapeAggregateFields (prefixWith fieldName) aggregateFields responseAggregates
        pure $ (fieldNameText, reshapedAggregateFields)
      TAFNodes _ annFields -> do
        reshapedRows <- traverse (reshapeAnnFields (prefixWith fieldName) annFields) responseRows
        pure $ (fieldNameText, JE.list id reshapedRows)
      TAFExp txt ->
        -- Literal text fields (e.g. __typename) are emitted directly
        pure $ (fieldNameText, JE.text txt)
  pure $ encodeAssocListAsObject reshapedFields
  where
    responseRows = fromMaybe mempty _qrRows
    responseAggregates = fromMaybe mempty _qrAggregates
-- | Reshapes requested aggregate fields into JSON by looking up their computed values in
-- the agent's aggregates map. Field names are prefixed with 'fieldPrefix' to match the
-- namespaced keys the request was built with; a missing key is a 500 (agent protocol error).
reshapeAggregateFields ::
  MonadError QErr m =>
  FieldPrefix ->
  AggregateFields 'DataConnector ->
  HashMap API.FieldName J.Value ->
  m J.Encoding
reshapeAggregateFields fieldPrefix aggregateFields responseAggregates = do
  reshapedFields <- forM aggregateFields $ \(fieldName@(FieldName fieldNameText), aggregateField) ->
    case aggregateField of
      AFCount _countAggregate -> do
        let fieldNameKey = API.FieldName . getFieldNameTxt $ applyPrefix fieldPrefix fieldName
        responseAggregateValue <-
          HashMap.lookup fieldNameKey responseAggregates
            `onNothing` throw500 ("Unable to find expected aggregate " <> API.unFieldName fieldNameKey <> " in aggregates returned by Data Connector agent") -- TODO(dchambers): Add pathing information for error clarity
        pure (fieldNameText, J.toEncoding responseAggregateValue)
      AFOp AggregateOp {..} -> do
        reshapedColumnFields <- forM _aoFields $ \(columnFieldName@(FieldName columnFieldNameText), columnField) ->
          case columnField of
            CFCol _column _columnType -> do
              -- Operation aggregates are doubly-namespaced: outer field name then column field name
              let fieldPrefix' = fieldPrefix <> prefixWith fieldName
              let columnFieldNameKey = API.FieldName . getFieldNameTxt $ applyPrefix fieldPrefix' columnFieldName
              responseAggregateValue <-
                HashMap.lookup columnFieldNameKey responseAggregates
                  `onNothing` throw500 ("Unable to find expected aggregate " <> API.unFieldName columnFieldNameKey <> " in aggregates returned by Data Connector agent") -- TODO(dchambers): Add pathing information for error clarity
              pure (columnFieldNameText, J.toEncoding responseAggregateValue)
            CFExp txt ->
              pure (columnFieldNameText, JE.text txt)
        pure (fieldNameText, encodeAssocListAsObject reshapedColumnFields)
      AFExp txt ->
        pure (fieldNameText, JE.text txt)
  pure $ encodeAssocListAsObject reshapedFields
-- | Reshapes a single response row into the requested fields' JSON object shape, looking
-- each (prefixed) field up in the agent's row. The lookup is deferred inside the monad
-- (see 'reshapeField') so a missing key only errors if the field value is actually needed.
reshapeAnnFields ::
  MonadError QErr m =>
  FieldPrefix ->
  AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  HashMap API.FieldName API.FieldValue ->
  m J.Encoding
reshapeAnnFields fieldNamePrefix fields responseRow = do
  reshapedFields <- forM fields $ \(fieldName@(FieldName fieldNameText), field) -> do
    let fieldNameKey = API.FieldName . getFieldNameTxt $ applyPrefix fieldNamePrefix fieldName
    let responseField =
          HashMap.lookup fieldNameKey responseRow
            `onNothing` throw500 ("Unable to find expected field " <> API.unFieldName fieldNameKey <> " in row returned by Data Connector agent") -- TODO(dchambers): Add pathing information for error clarity
    reshapedField <- reshapeField field responseField
    pure (fieldNameText, reshapedField)
  pure $ encodeAssocListAsObject reshapedFields
-- | Encodes an association list of field names to already-encoded JSON values as a
-- JSON object, preserving the ordering of the input list.
encodeAssocListAsObject :: [(Text, J.Encoding)] -> J.Encoding
encodeAssocListAsObject =
  JE.dict
    JE.text
    id
    -- Folds the assoc list pairwise into JE.dict's accumulator
    (\fn -> foldr (uncurry fn))
-- | Reshapes a single field's value from the agent's response into JSON. Column fields
-- are deserialized as column values; relationship fields carry a nested
-- 'API.QueryResponse' which is reshaped recursively using the relationship's sub-select.
reshapeField ::
  MonadError QErr m =>
  AnnFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  m API.FieldValue -> -- This lookup is lazy (behind the monad) so that we can _not_ do it when we've got an AFExpression
  m J.Encoding
reshapeField field responseFieldValue =
  case field of
    AFColumn _columnField -> do
      columnFieldValue <- API.deserializeAsColumnFieldValue <$> responseFieldValue
      pure $ J.toEncoding columnFieldValue
    AFObjectRelation objectRelationField -> do
      relationshipFieldValue <- API.deserializeAsRelationshipFieldValue <$> responseFieldValue
      case relationshipFieldValue of
        Left err -> throw500 $ "Found column field value where relationship field value was expected in field returned by Data Connector agent: " <> err -- TODO(dchambers): Add pathing information for error clarity
        Right subqueryResponse ->
          let fields = _aosFields $ _aarAnnSelect objectRelationField
           in reshapeSimpleSelectRows Single fields subqueryResponse
    AFArrayRelation (ASSimple simpleArrayRelationField) ->
      reshapeAnnRelationSelect (reshapeSimpleSelectRows Many) simpleArrayRelationField =<< responseFieldValue
    AFArrayRelation (ASAggregate aggregateArrayRelationField) ->
      reshapeAnnRelationSelect reshapeTableAggregateFields aggregateArrayRelationField =<< responseFieldValue
    -- Literal expression fields (e.g. __typename) never consult the response row
    AFExpression txt -> pure $ JE.text txt
-- | Deserializes a relationship field value into its nested 'API.QueryResponse' and
-- reshapes it using the provided reshaping function and the relationship's sub-select
-- fields. Fails with a 500 if the agent returned a column value instead.
reshapeAnnRelationSelect ::
  MonadError QErr m =>
  (Fields (fieldType (UnpreparedValue 'DataConnector)) -> API.QueryResponse -> m J.Encoding) ->
  AnnRelationSelectG 'DataConnector (AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector)) ->
  API.FieldValue ->
  m J.Encoding
reshapeAnnRelationSelect reshapeFields annRelationSelect fieldValue =
  case API.deserializeAsRelationshipFieldValue fieldValue of
    Left err -> throw500 $ "Found column field value where relationship field value was expected in field returned by Data Connector agent: " <> err -- TODO(dchambers): Add pathing information for error clarity
    Right subqueryResponse ->
      let annSimpleSelect = _aarAnnSelect annRelationSelect
       in reshapeFields (_asnFields annSimpleSelect) subqueryResponse
-- | Converts a map keyed by Hasura field names into one keyed by API field names,
-- collapsing an empty map to 'Nothing'.
mapFieldNameHashMap :: Eq v => HashMap FieldName v -> Maybe (HashMap API.FieldName v)
mapFieldNameHashMap hashMap =
  memptyToNothing (HashMap.mapKeys (API.FieldName . getFieldNameTxt) hashMap)

-- | Collapses a monoidal value to 'Nothing' when it is 'mempty', and wraps it in
-- 'Just' otherwise.
memptyToNothing :: (Monoid m, Eq m) => m -> Maybe m
memptyToNothing value
  | value == mempty = Nothing
  | otherwise = Just value
-- | Converts a Hasura relationship name into the Data Connector API's relationship
-- name wrapper type.
mkRelationshipName :: RelName -> API.RelationshipName
mkRelationshipName = API.RelationshipName . toTxt

View File

@ -0,0 +1,363 @@
-- | This module contains Data Connector request/response planning code and utility
-- functions and types that are common across the different categories of requests
-- (ie queries, mutations, etc). It contains code and concepts that are independent
-- of these different categories.
--
-- Both 'Hasura.Backends.DataConnector.Plan.QueryPlan' and
-- 'Hasura.Backends.DataConnector.Plan.MutationPlan' use the contents of this module,
-- for example 'Hasura.Backends.DataConnector.Plan.QueryPlan.mkQueryPlan`.
module Hasura.Backends.DataConnector.Plan.Common
( Plan (..),
TableRelationships (..),
FieldPrefix,
noPrefix,
prefixWith,
applyPrefix,
Cardinality (..),
recordTableRelationship,
recordTableRelationshipFromRelInfo,
prepareLiteral,
translateBoolExpToExpression,
mkRelationshipName,
encodeAssocListAsObject,
)
where
import Control.Monad.Trans.Writer.CPS qualified as CPS
import Data.Aeson qualified as J
import Data.Aeson.Encoding qualified as JE
import Data.Aeson.Types qualified as J
import Data.Bifunctor (Bifunctor (bimap))
import Data.ByteString qualified as BS
import Data.Has (Has (modifier))
import Data.HashMap.Strict qualified as HashMap
import Data.Text qualified as T
import Data.Text.Encoding qualified as TE
import Data.Text.Extended (toTxt, (<<>), (<>>))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Backend
import Hasura.Backends.DataConnector.Adapter.Types
import Hasura.Base.Error
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.Value
import Hasura.RQL.Types.Backend (SessionVarType)
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Relationships.Local (RelInfo (..))
import Hasura.SQL.Backend
import Hasura.SQL.Types (CollectableType (..))
import Hasura.Session
import Witch qualified
--------------------------------------------------------------------------------
-- | Represents a 'request' to be sent to a data connector agent ('_pRequest') and a function
-- that is capable of reshaping the response to that request into the final JSON form expected
-- to be returned by the GraphQL endpoint ('_pResponseReshaper').
data Plan request response = Plan
  { -- | The request to send to the data connector agent
    _pRequest :: request,
    -- | Reshapes the agent's response into the JSON encoding expected by the GraphQL endpoint
    _pResponseReshaper :: forall m. (MonadError QErr m) => response -> m J.Encoding
  }
--------------------------------------------------------------------------------
-- | A monoidal data structure used to record Table Relationships encountered during request
-- translation. Used with 'recordTableRelationship'.
newtype TableRelationships = TableRelationships
  {unTableRelationships :: HashMap API.TableName (HashMap API.RelationshipName API.Relationship)}
  deriving stock (Eq, Show)

-- Relationships recorded against the same source table are merged together
instance Semigroup TableRelationships where
  (TableRelationships l) <> (TableRelationships r) = TableRelationships $ HashMap.unionWith HashMap.union l r

instance Monoid TableRelationships where
  mempty = TableRelationships mempty
-- | Records a table relationship encountered during request translation into the output of the current
-- 'CPS.WriterT'. The 'Has' constraint allows the relationship to be written into any writer output
-- type that contains a 'TableRelationships' component.
recordTableRelationship ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  API.TableName ->
  API.RelationshipName ->
  API.Relationship ->
  CPS.WriterT writerOutput m ()
recordTableRelationship sourceTableName relationshipName relationship =
  let newRelationship = TableRelationships $ HashMap.singleton sourceTableName (HashMap.singleton relationshipName relationship)
   in -- Inject the singleton into an otherwise-mempty writer output via the Has lens
      CPS.tell $ modifier (const newRelationship) mempty
-- | Builds an 'API.Relationship' from Hasura's 'RelInfo' relationship metadata, records it
-- into the output of the current 'CPS.WriterT' (see 'recordTableRelationship'), and returns
-- the generated relationship name alongside the relationship for use by the caller.
recordTableRelationshipFromRelInfo ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  API.TableName ->
  RelInfo 'DataConnector ->
  CPS.WriterT writerOutput m (API.RelationshipName, API.Relationship)
recordTableRelationshipFromRelInfo sourceTableName RelInfo {..} = do
  let relationshipName = mkRelationshipName riName
  let relationshipType = case riType of
        ObjRel -> API.ObjectRelationship
        ArrRel -> API.ArrayRelationship
  let relationship =
        API.Relationship
          { _rTargetTable = Witch.from riRTable,
            _rRelationshipType = relationshipType,
            _rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList riMapping
          }
  recordTableRelationship
    sourceTableName
    relationshipName
    relationship
  pure (relationshipName, relationship)
--------------------------------------------------------------------------------
-- | Represents a potential prefix that can be applied to a field name, useful for
-- namespacing field names that may be otherwise duplicated.
newtype FieldPrefix = FieldPrefix (Maybe FieldName)
  deriving stock (Show, Eq)

-- Prefixes compose by joining with an underscore; an absent prefix is the identity
instance Semigroup FieldPrefix where
  (FieldPrefix Nothing) <> (FieldPrefix something) = FieldPrefix something
  (FieldPrefix something) <> (FieldPrefix Nothing) = FieldPrefix something
  (FieldPrefix (Just l)) <> (FieldPrefix (Just r)) = FieldPrefix . Just $ l <> "_" <> r

instance Monoid FieldPrefix where
  mempty = FieldPrefix Nothing

-- | The empty prefix; 'applyPrefix' with this leaves field names unchanged
noPrefix :: FieldPrefix
noPrefix = FieldPrefix Nothing

-- | A prefix consisting of the given field name
prefixWith :: FieldName -> FieldPrefix
prefixWith = FieldPrefix . Just

-- | Applies a prefix to a field name, separating the two with an underscore
applyPrefix :: FieldPrefix -> FieldName -> FieldName
applyPrefix (FieldPrefix fieldNamePrefix) fieldName = maybe fieldName (\prefix -> prefix <> "_" <> fieldName) fieldNamePrefix
--------------------------------------------------------------------------------
-- | Whether a query is expected to return at most one row or many rows
data Cardinality
  = Single
  | Many
--------------------------------------------------------------------------------
-- | Resolves an 'UnpreparedValue' into a concrete 'Literal', looking up session variable
-- values in the supplied 'SessionVariables' where necessary. 'UVSession' (the whole session
-- as a value) is not supported by this backend.
prepareLiteral ::
  MonadError QErr m =>
  SessionVariables ->
  UnpreparedValue 'DataConnector ->
  m Literal
prepareLiteral sessionVariables = \case
  UVLiteral literal -> pure $ literal
  UVParameter _ e -> pure (ValueLiteral (columnTypeToScalarType $ cvType e) (cvValue e))
  UVSession -> throw400 NotSupported "prepareLiteral: UVSession"
  UVSessionVar sessionVarType sessionVar -> do
    textValue <-
      getSessionVariableValue sessionVar sessionVariables
        `onNothing` throw400 NotSupported ("prepareLiteral: session var not found: " <>> sessionVar)
    parseSessionVariable sessionVar sessionVarType textValue
-- | Parses the textual value of a session variable into a 'Literal' of the expected scalar
-- (or array-of-scalar) type. Values are expected to be JSON-encoded, with one exception:
-- plain GraphQL string scalars use the session variable's text verbatim rather than
-- requiring a JSON-quoted string. Parse failures are surfaced as 400 errors.
parseSessionVariable ::
  forall m.
  MonadError QErr m =>
  SessionVariable ->
  SessionVarType 'DataConnector ->
  Text ->
  m Literal
parseSessionVariable varName varType varValue = do
  case varType of
    CollectableTypeScalar scalarType@(ScalarType customTypeName _) ->
      parseCustomValue scalarType (customTypeName <> " JSON value")
    CollectableTypeArray scalarType@(ScalarType customTypeName _) ->
      parseCustomArray scalarType ("JSON array of " <> customTypeName <> " JSON values")
  where
    parseCustomValue :: ScalarType -> Text -> m Literal
    parseCustomValue scalarType description =
      case scalarType of
        ScalarType _ (Just GraphQLString) ->
          -- Special case for string: uses literal session variable value rather than trying to parse a JSON string
          pure . ValueLiteral scalarType $ J.String varValue
        _ ->
          parseValue' (parseValue scalarType) (ValueLiteral scalarType) description

    parseCustomArray :: ScalarType -> Text -> m Literal
    parseCustomArray scalarType =
      parseValue' parser (ArrayLiteral scalarType)
      where
        parser :: (J.Value -> J.Parser [J.Value])
        parser = J.withArray "array of JSON values" (fmap toList . traverse (parseValue scalarType))

    -- Decodes the session variable text as JSON, then applies the given parser,
    -- turning any failure into a descriptive 400 error
    parseValue' :: (J.Value -> J.Parser a) -> (a -> Literal) -> Text -> m Literal
    parseValue' parser toLiteral description =
      toLiteral
        <$> (J.eitherDecodeStrict' valValueBS >>= J.parseEither parser)
        `onLeft` (\err -> throw400 ParseFailed ("Expected " <> description <> " for session variable " <> varName <<> ". " <> T.pack err))

    valValueBS :: BS.ByteString
    valValueBS = TE.encodeUtf8 varValue
--------------------------------------------------------------------------------
-- | Translates an 'AnnBoolExp' into an 'API.Expression', returning 'Nothing' when the
-- translated expression reduces to an always-true expression (e.g. an empty 'API.And')
-- and can therefore be omitted from the request entirely.
translateBoolExpToExpression ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m (Maybe API.Expression)
translateBoolExpToExpression sessionVariables sourceTableName boolExp = do
  removeAlwaysTrueExpression <$> translateBoolExp sessionVariables sourceTableName boolExp
-- | Translates an 'AnnBoolExp' into an 'API.Expression', resolving session variables in
-- comparison values and recording any table relationships encountered along the way into
-- the 'CPS.WriterT' output so they can be included in the final request.
translateBoolExp ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  AnnBoolExp 'DataConnector (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m API.Expression
translateBoolExp sessionVariables sourceTableName = \case
  BoolAnd xs ->
    mkIfZeroOrMany API.And . mapMaybe removeAlwaysTrueExpression <$> traverse (translateBoolExp' sourceTableName) xs
  BoolOr xs ->
    mkIfZeroOrMany API.Or . mapMaybe removeAlwaysFalseExpression <$> traverse (translateBoolExp' sourceTableName) xs
  BoolNot x ->
    API.Not <$> (translateBoolExp' sourceTableName) x
  BoolField (AVColumn c xs) ->
    -- Multiple operators applied to a single column are implicitly ANDed together
    lift $ mkIfZeroOrMany API.And <$> traverse (translateOp sessionVariables (Witch.from $ ciColumn c) (Witch.from . columnTypeToScalarType $ ciType c)) xs
  BoolField (AVRelationship relationshipInfo boolExp) -> do
    (relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
    API.Exists (API.RelatedTable relationshipName) <$> translateBoolExp' _rTargetTable boolExp
  BoolExists GExists {..} ->
    let tableName = Witch.from _geTable
     in API.Exists (API.UnrelatedTable tableName) <$> translateBoolExp' tableName _geWhere
  where
    -- Recursion shorthand with the session variables already applied
    translateBoolExp' = translateBoolExp sessionVariables

    -- Makes an 'API.Expression' like 'API.And' if there is zero or many input expressions otherwise
    -- just returns the singleton expression. This helps remove redundant 'API.And' etcs from the expression.
    mkIfZeroOrMany :: ([API.Expression] -> API.Expression) -> [API.Expression] -> API.Expression
    mkIfZeroOrMany mk = \case
      [singleExp] -> singleExp
      zeroOrManyExps -> mk zeroOrManyExps

    -- 'API.And []' and 'API.Not (API.Or [])' are both trivially true, so can be dropped
    removeAlwaysTrueExpression :: API.Expression -> Maybe API.Expression
    removeAlwaysTrueExpression = \case
      API.And [] -> Nothing
      API.Not (API.Or []) -> Nothing
      other -> Just other

    -- 'API.Or []' and 'API.Not (API.And [])' are both trivially false, so can be dropped
    removeAlwaysFalseExpression :: API.Expression -> Maybe API.Expression
    removeAlwaysFalseExpression = \case
      API.Or [] -> Nothing
      API.Not (API.And []) -> Nothing
      other -> Just other
-- | Translates a single 'OpExpG' comparison operator applied to a column into an
-- 'API.Expression', resolving session variables in the comparison value first.
-- Array literals are only accepted by the IN/NOT IN operators and by custom binary array
-- operators; the ordinary binary comparison operators reject them with a 400 error.
-- LIKE-style operators and casts are not supported by this backend.
translateOp ::
  MonadError QErr m =>
  SessionVariables ->
  API.ColumnName ->
  API.ScalarType ->
  OpExpG 'DataConnector (UnpreparedValue 'DataConnector) ->
  m API.Expression
translateOp sessionVariables columnName columnType opExp = do
  -- Resolve session variables/parameters in the operator's comparison value first
  preparedOpExp <- traverse (prepareLiteral sessionVariables) $ opExp
  case preparedOpExp of
    AEQ _ (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.Equal value scalarType
    AEQ _ (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for AEQ operator"
    ANE _ (ValueLiteral scalarType value) ->
      pure . API.Not $ mkApplyBinaryComparisonOperatorToScalar API.Equal value scalarType
    ANE _ (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for ANE operator"
    AGT (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.GreaterThan value scalarType
    AGT (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for AGT operator"
    ALT (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.LessThan value scalarType
    ALT (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for ALT operator"
    AGTE (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.GreaterThanOrEqual value scalarType
    AGTE (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for AGTE operator"
    ALTE (ValueLiteral scalarType value) ->
      pure $ mkApplyBinaryComparisonOperatorToScalar API.LessThanOrEqual value scalarType
    ALTE (ArrayLiteral _scalarType _array) ->
      throw400 NotSupported "Array literals not supported for ALTE operator"
    ANISNULL ->
      pure $ API.ApplyUnaryComparisonOperator API.IsNull currentComparisonColumn
    ANISNOTNULL ->
      pure $ API.Not (API.ApplyUnaryComparisonOperator API.IsNull currentComparisonColumn)
    AIN literal -> pure $ inOperator literal
    ANIN literal -> pure . API.Not $ inOperator literal
    CEQ rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.Equal rootOrCurrentColumn
    CNE rootOrCurrentColumn ->
      pure $ API.Not $ mkApplyBinaryComparisonOperatorToAnotherColumn API.Equal rootOrCurrentColumn
    CGT rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.GreaterThan rootOrCurrentColumn
    CLT rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.LessThan rootOrCurrentColumn
    CGTE rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.GreaterThanOrEqual rootOrCurrentColumn
    CLTE rootOrCurrentColumn ->
      pure $ mkApplyBinaryComparisonOperatorToAnotherColumn API.LessThanOrEqual rootOrCurrentColumn
    ALIKE _literal ->
      throw400 NotSupported "The ALIKE operator is not supported by the Data Connector backend"
    ANLIKE _literal ->
      throw400 NotSupported "The ANLIKE operator is not supported by the Data Connector backend"
    ACast _literal ->
      throw400 NotSupported "The ACast operator is not supported by the Data Connector backend"
    ABackendSpecific CustomBooleanOperator {..} -> case _cboRHS of
      Nothing -> pure $ API.ApplyUnaryComparisonOperator (API.CustomUnaryComparisonOperator _cboName) currentComparisonColumn
      Just (Left rootOrCurrentColumn) ->
        pure $ mkApplyBinaryComparisonOperatorToAnotherColumn (API.CustomBinaryComparisonOperator _cboName) rootOrCurrentColumn
      Just (Right (ValueLiteral scalarType value)) ->
        pure $ mkApplyBinaryComparisonOperatorToScalar (API.CustomBinaryComparisonOperator _cboName) value scalarType
      Just (Right (ArrayLiteral scalarType array)) ->
        pure $ API.ApplyBinaryArrayComparisonOperator (API.CustomBinaryArrayComparisonOperator _cboName) currentComparisonColumn array (Witch.from scalarType)
  where
    -- The column this operator is being applied to, on the current table
    currentComparisonColumn :: API.ComparisonColumn
    currentComparisonColumn = API.ComparisonColumn API.CurrentTable columnName columnType

    -- Compares the current column against another column on either the query (root) table
    -- or the current table. NOTE(review): the other column reuses 'columnType' — this
    -- assumes both columns share a scalar type; confirm against callers.
    mkApplyBinaryComparisonOperatorToAnotherColumn :: API.BinaryComparisonOperator -> RootOrCurrentColumn 'DataConnector -> API.Expression
    mkApplyBinaryComparisonOperatorToAnotherColumn operator (RootOrCurrentColumn rootOrCurrent otherColumnName) =
      let columnPath = case rootOrCurrent of
            IsRoot -> API.QueryTable
            IsCurrent -> API.CurrentTable
       in API.ApplyBinaryComparisonOperator operator currentComparisonColumn (API.AnotherColumn $ API.ComparisonColumn columnPath (Witch.from otherColumnName) columnType)

    -- A single value literal is treated as a singleton array for the IN operator
    inOperator :: Literal -> API.Expression
    inOperator literal =
      let (values, scalarType) = case literal of
            ArrayLiteral scalarType' array -> (array, scalarType')
            ValueLiteral scalarType' value -> ([value], scalarType')
       in API.ApplyBinaryArrayComparisonOperator API.In currentComparisonColumn values (Witch.from scalarType)

    mkApplyBinaryComparisonOperatorToScalar :: API.BinaryComparisonOperator -> J.Value -> ScalarType -> API.Expression
    mkApplyBinaryComparisonOperatorToScalar operator value scalarType =
      API.ApplyBinaryComparisonOperator operator currentComparisonColumn (API.ScalarValue value (Witch.from scalarType))
--------------------------------------------------------------------------------
-- | Converts a Hasura relationship name into the Data Connector API's relationship
-- name wrapper type.
mkRelationshipName :: RelName -> API.RelationshipName
mkRelationshipName relName = API.RelationshipName $ toTxt relName
--------------------------------------------------------------------------------
-- | Encodes an association list of field names to already-encoded JSON values as a
-- JSON object, preserving the ordering of the input list.
encodeAssocListAsObject :: [(Text, J.Encoding)] -> J.Encoding
encodeAssocListAsObject pairs =
  JE.dict
    JE.text
    id
    -- Fold each (key, value) pair of the assoc list into JE.dict's accumulator
    (\step initial -> foldr (\(key, value) acc -> step key value acc) initial)
    pairs

View File

@ -0,0 +1,259 @@
module Hasura.Backends.DataConnector.Plan.MutationPlan
( mkMutationPlan,
)
where
import Control.Monad.Trans.Writer.CPS qualified as CPS
import Data.Aeson qualified as J
import Data.Aeson.Encoding qualified as JE
import Data.Has (Has, modifier)
import Data.HashMap.Strict qualified as HashMap
import Data.Text.Extended (toTxt)
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Types
import Hasura.Backends.DataConnector.Plan.Common
import Hasura.Backends.DataConnector.Plan.QueryPlan (reshapeAnnFields, translateAnnFields)
import Hasura.Base.Error (Code (..), QErr, throw400, throw500)
import Hasura.Prelude
import Hasura.RQL.IR.Delete
import Hasura.RQL.IR.Insert hiding (Single)
import Hasura.RQL.IR.Returning
import Hasura.RQL.IR.Root
import Hasura.RQL.IR.Select
import Hasura.RQL.IR.Update
import Hasura.RQL.IR.Value
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.SQL.Backend (BackendType (..))
import Hasura.Session (SessionVariables)
import Language.GraphQL.Draft.Syntax qualified as G
import Witch qualified
--------------------------------------------------------------------------------
-- | A monoidal data structure used to record, per table, the schema of the fields used in
-- insert operations encountered during mutation translation. Used with 'recordTableInsertSchema'.
newtype TableInsertSchemas = TableInsertSchemas
  {unTableInsertSchemas :: HashMap API.TableName (HashMap API.FieldName API.InsertFieldSchema)}
  deriving stock (Eq, Show)

-- Field schemas recorded against the same table are merged together
instance Semigroup TableInsertSchemas where
  (TableInsertSchemas l) <> (TableInsertSchemas r) = TableInsertSchemas $ HashMap.unionWith HashMap.union l r

instance Monoid TableInsertSchemas where
  mempty = TableInsertSchemas mempty
-- | Records the insert field schemas for a table into the output of the current
-- 'CPS.WriterT', so they can be collected into the final 'API.MutationRequest'.
recordTableInsertSchema ::
  ( Has TableInsertSchemas writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  API.TableName ->
  HashMap API.FieldName API.InsertFieldSchema ->
  CPS.WriterT writerOutput m ()
recordTableInsertSchema tableName fieldSchemas =
  let newTableSchema = TableInsertSchemas $ HashMap.singleton tableName fieldSchemas
   in -- Inject the singleton into an otherwise-mempty writer output via the Has lens
      CPS.tell $ modifier (const newTableSchema) mempty
--------------------------------------------------------------------------------
-- | Builds a 'Plan' for a mutation: the 'API.MutationRequest' to send to the data connector
-- agent, together with the function that reshapes the agent's 'API.MutationResponse' into
-- the JSON shape expected by the GraphQL endpoint.
mkMutationPlan ::
  MonadError QErr m =>
  SessionVariables ->
  MutationDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  m (Plan API.MutationRequest API.MutationResponse)
mkMutationPlan sessionVariables mutationDB = do
  request <- translateMutationDB sessionVariables mutationDB
  pure $ Plan request (reshapeResponseToMutationGqlShape mutationDB)
-- | Translates a 'MutationDB' into an 'API.MutationRequest'. Currently only insert
-- mutations are implemented; update, delete and function mutations fail with a 400.
translateMutationDB ::
  MonadError QErr m =>
  SessionVariables ->
  MutationDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  m API.MutationRequest
translateMutationDB sessionVariables = \case
  MDBInsert insert -> do
    -- Translating the insert also collects the relationships and insert schemas it touches
    (insertOperation, (tableRelationships, tableInsertSchemas)) <- CPS.runWriterT $ translateInsert sessionVariables insert
    let apiTableRelationships = uncurry API.TableRelationships <$> HashMap.toList (unTableRelationships tableRelationships)
    let apiTableInsertSchema = uncurry API.TableInsertSchema <$> HashMap.toList (unTableInsertSchemas tableInsertSchemas)
    pure $
      API.MutationRequest
        { _mrTableRelationships = apiTableRelationships,
          _mrInsertSchema = apiTableInsertSchema,
          _mrOperations = [API.InsertOperation insertOperation]
        }
  MDBUpdate _update ->
    throw400 NotSupported "translateMutationDB: update mutations not implemented for the Data Connector backend."
  MDBDelete _delete ->
    throw400 NotSupported "translateMutationDB: delete mutations not implemented for the Data Connector backend."
  MDBFunction _returnsSet _select ->
    throw400 NotSupported "translateMutationDB: function mutations not implemented for the Data Connector backend."
-- | Translate an annotated insert IR term into an 'API.InsertMutationOperation',
-- recording (via the writer) the table relationships and insert field schemas
-- needed by the enclosing 'API.MutationRequest'.
translateInsert ::
  MonadError QErr m =>
  SessionVariables ->
  AnnotatedInsert 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT (TableRelationships, TableInsertSchemas) m API.InsertMutationOperation
translateInsert sessionVariables AnnotatedInsert {..} = do
  rows <- traverse (translateInsertRow sessionVariables tableName _aiTableColumns _aiPresetValues) _aiInsertObject
  -- The insert permission check condition is sent to the agent so that it can
  -- be evaluated against the rows after they have been inserted
  postInsertCheck <- translateBoolExpToExpression sessionVariables tableName insertCheckCondition
  returningFields <- translateMutationOutputToReturningFields sessionVariables tableName _aiOutput
  pure $
    API.InsertMutationOperation
      { API._imoTable = tableName,
        API._imoRows = rows,
        API._imoPostInsertCheck = postInsertCheck,
        API._imoReturningFields = HashMap.mapKeys (API.FieldName . getFieldNameTxt) returningFields
      }
  where
    tableName = Witch.from _aiTableName
    -- NOTE(review): only the first component of '_aiCheckCondition' is used
    -- here — presumably the insert check; confirm the second component's role
    -- against the IR definition
    insertCheckCondition = fst _aiCheckCondition
    AnnotatedInsertData {..} = _aiData
-- | Translate a single insert row into an 'API.RowObject' of column values,
-- also recording (via the writer) the 'API.InsertFieldSchema' of every column
-- used in the row.
translateInsertRow ::
  ( Has TableInsertSchemas writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  [ColumnInfo 'DataConnector] ->
  HashMap ColumnName (UnpreparedValue 'DataConnector) ->
  AnnotatedInsertRow 'DataConnector (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m API.RowObject
translateInsertRow sessionVariables tableName tableColumns defaultColumnValues insertRow = do
  columnSchemasAndValues <- lift $ forM (HashMap.toList columnUnpreparedValues) $ \(columnName, columnValue) -> do
    -- The API field name used for the column is its GraphQL name, looked up
    -- from the table's column info
    fieldName <-
      case find (\ColumnInfo {..} -> ciColumn == columnName) tableColumns of
        Just ColumnInfo {..} -> pure . API.FieldName $ G.unName ciName
        Nothing -> throw500 $ "Can't find column " <> toTxt columnName <> " in table schema for " <> API.tableNameToText tableName
    -- Session-variable references are resolved to literal values here
    preparedLiteral <- prepareLiteral sessionVariables columnValue
    (scalarType, value) <-
      case preparedLiteral of
        ValueLiteral scalarType value -> pure (scalarType, value)
        ArrayLiteral _scalarType _values -> throw400 NotSupported "translateInsertRow: Array literals are not supported as column insert values"
    let columnInsertSchema = API.ColumnInsertSchema (Witch.from columnName) (Witch.from scalarType)
    pure (fieldName, columnInsertSchema, value)
  -- Record the schema of every inserted field for the request's insert schema
  let fieldSchemas =
        columnSchemasAndValues
          & fmap (\(fieldName, columnInsertSchema, _) -> (fieldName, API.ColumnInsert columnInsertSchema))
          & HashMap.fromList
  recordTableInsertSchema tableName fieldSchemas
  let rowObject =
        columnSchemasAndValues
          & fmap (\(fieldName, _, value) -> (fieldName, API.mkColumnInsertFieldValue value))
          & HashMap.fromList
          & API.RowObject
  pure rowObject
  where
    -- Values given in the row itself take precedence over preset
    -- (permission-derived) column values ('HashMap.union' is left-biased)
    columnUnpreparedValues :: HashMap ColumnName (UnpreparedValue 'DataConnector)
    columnUnpreparedValues = HashMap.union rowColumnValues defaultColumnValues
    rowColumnValues :: HashMap ColumnName (UnpreparedValue 'DataConnector)
    rowColumnValues =
      insertRow
        & fmap (\(AIColumn columnNameAndValue) -> columnNameAndValue)
        & HashMap.fromList
-- | Compute the fields the agent should return for the rows affected by a
-- mutation, based on the mutation output requested in the GraphQL query.
-- Multi-row output fields are merged into a single field map, with each
-- field's name prefixed to keep them distinct (see 'translateMutField').
translateMutationOutputToReturningFields ::
  MonadError QErr m =>
  SessionVariables ->
  API.TableName ->
  MutationOutputG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT (TableRelationships, TableInsertSchemas) m (HashMap FieldName API.Field)
translateMutationOutputToReturningFields sessionVariables tableName = \case
  MOutSinglerowObject annFields ->
    translateAnnFields sessionVariables noPrefix tableName annFields
  MOutMultirowFields mutFields ->
    HashMap.unions <$> traverse (uncurry $ translateMutField sessionVariables tableName) mutFields
-- | Translate one mutation output field into the returning fields the agent
-- must produce. Fields that can be reconstructed locally during response
-- reshaping ('MCount', 'MExp') translate to nothing.
translateMutField ::
  MonadError QErr m =>
  SessionVariables ->
  API.TableName ->
  FieldName ->
  MutFldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT (TableRelationships, TableInsertSchemas) m (HashMap FieldName API.Field)
translateMutField sessionVariables tableName fieldName = \case
  MCount ->
    -- All mutation operations in a request return their affected rows count.
    -- The count can just be added to the response JSON during agent response reshaping
    pure mempty
  MExp _text ->
    -- We ignore literal text fields (we don't send them to the data connector agent)
    -- and add them back to the response JSON when we reshape what the agent returns
    -- to us
    pure mempty
  MRet annFields ->
    -- Prefix with the output field name so fields from different 'MRet's
    -- don't collide when merged by the caller
    translateAnnFields sessionVariables (prefixWith fieldName) tableName annFields
--------------------------------------------------------------------------------
-- | Reshape the agent's mutation response into the GraphQL response shape
-- dictated by the mutation IR. Exactly one operation result is expected,
-- since requests are built with a single operation (see 'translateMutationDB').
reshapeResponseToMutationGqlShape ::
  MonadError QErr m =>
  MutationDB 'DataConnector Void v ->
  API.MutationResponse ->
  m J.Encoding
reshapeResponseToMutationGqlShape mutationDb API.MutationResponse {..} = do
  mutationOperationResult <-
    listToMaybe _mrOperationResults
      `onNothing` throw500 "Unable to find expected mutation operation results"
  mutationOutput <-
    case mutationDb of
      MDBInsert AnnotatedInsert {..} -> pure _aiOutput
      MDBUpdate AnnotatedUpdateG {..} -> pure _auOutput
      MDBDelete AnnDel {..} -> pure _adOutput
      MDBFunction _returnsSet _select -> throw400 NotSupported "reshapeResponseToMutationGqlShape: function mutations not implemented for the Data Connector backend."
  reshapeMutationOutput mutationOutput mutationOperationResult
-- | Reshape a single mutation operation result according to the requested
-- mutation output: either a single returned row object, or a set of output
-- fields (affected rows count, literal expressions, returning rows).
reshapeMutationOutput ::
  MonadError QErr m =>
  MutationOutputG 'DataConnector Void v ->
  API.MutationOperationResults ->
  m J.Encoding
reshapeMutationOutput mutationOutput mutationOperationResults =
  case mutationOutput of
    MOutSinglerowObject annFields -> reshapeReturningRows Single noPrefix annFields mutationOperationResults
    MOutMultirowFields mutFields -> reshapeMutFields mutFields mutationOperationResults
-- | Reshape the returning rows of a mutation operation result. With 'Single'
-- cardinality, no rows encode as JSON null, one row encodes as an object, and
-- more than one row is a 500 error; with 'Many', the rows encode as a JSON
-- array. A missing '_morReturning' is treated as no rows.
reshapeReturningRows ::
  MonadError QErr m =>
  Cardinality ->
  FieldPrefix ->
  AnnFieldsG 'DataConnector Void v ->
  API.MutationOperationResults ->
  m J.Encoding
reshapeReturningRows cardinality fieldNamePrefix annFields API.MutationOperationResults {..} =
  case cardinality of
    Single ->
      case rows of
        [] -> pure $ J.toEncoding J.Null
        [singleRow] -> reshapeAnnFields fieldNamePrefix annFields singleRow
        _multipleRows ->
          throw500 "Data Connector agent returned multiple rows in a mutation operation result when only one was expected"
    Many -> do
      reshapedRows <- traverse (reshapeAnnFields fieldNamePrefix annFields) rows
      pure $ JE.list id reshapedRows
  where
    rows = fromMaybe mempty _morReturning
-- | Reshape multi-row mutation output fields into a JSON object. Counts come
-- from the operation result's affected-rows count, literal expression fields
-- are re-added here (they were never sent to the agent), and returning rows
-- are looked up using the same field-name prefix used during translation.
reshapeMutFields ::
  MonadError QErr m =>
  MutFldsG 'DataConnector Void v ->
  API.MutationOperationResults ->
  m J.Encoding
reshapeMutFields mutFields mutationOperationResults@API.MutationOperationResults {..} = do
  reshapedMutFields <- forM mutFields $ \(fieldName@(FieldName fieldNameText), mutField) -> do
    reshapedFieldValue <-
      case mutField of
        MCount -> pure $ JE.int _morAffectedRows
        MExp literalText -> pure $ JE.text literalText
        MRet annFields -> reshapeReturningRows Many (prefixWith fieldName) annFields mutationOperationResults
    pure (fieldNameText, reshapedFieldValue)
  pure $ encodeAssocListAsObject reshapedMutFields

View File

@ -0,0 +1,554 @@
module Hasura.Backends.DataConnector.Plan.QueryPlan
( mkQueryPlan,
queryHasRelations,
translateAnnFields,
reshapeAnnFields,
)
where
--------------------------------------------------------------------------------
import Control.Monad.Trans.Writer.CPS qualified as CPS
import Data.Aeson qualified as J
import Data.Aeson.Encoding qualified as JE
import Data.Bifunctor (Bifunctor (bimap))
import Data.Has (Has)
import Data.HashMap.Strict qualified as HashMap
import Data.List.NonEmpty qualified as NE
import Data.Semigroup (Min (..))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Backend
import Hasura.Backends.DataConnector.Adapter.Types
import Hasura.Backends.DataConnector.Plan.Common
import Hasura.Base.Error
import Hasura.Prelude
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.IR.OrderBy
import Hasura.RQL.IR.Select
import Hasura.RQL.IR.Value
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.SQL.Backend
import Hasura.Session
import Language.GraphQL.Draft.Syntax qualified as G
import Witch qualified
--------------------------------------------------------------------------------
-- | The plain fields and the aggregates requested from a table, kept separate
-- because the API query carries them in separate properties.
data FieldsAndAggregates = FieldsAndAggregates
  { _faaFields :: HashMap FieldName API.Field,
    _faaAggregates :: HashMap FieldName API.Aggregate
  }
  deriving stock (Show, Eq)

-- Pointwise union of the two maps; left-biased on duplicate field names,
-- following the 'Semigroup' instance of 'HashMap'.
instance Semigroup FieldsAndAggregates where
  left <> right =
    FieldsAndAggregates
      (_faaFields left <> _faaFields right)
      (_faaAggregates left <> _faaAggregates right)

instance Monoid FieldsAndAggregates where
  mempty = FieldsAndAggregates mempty mempty
--------------------------------------------------------------------------------
-- | Map a 'QueryDB 'DataConnector' term into a 'Plan', pairing the
-- 'API.QueryRequest' to send to the Data Connector agent with a reshaping
-- function that converts the agent's response into the GraphQL response shape.
mkQueryPlan ::
  forall m.
  MonadError QErr m =>
  SessionVariables ->
  SourceConfig ->
  QueryDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  m (Plan API.QueryRequest API.QueryResponse)
mkQueryPlan sessionVariables (SourceConfig {}) ir = do
  queryRequest <- translateQueryDB ir
  pure $ Plan queryRequest (reshapeResponseToQueryShape ir)
  where
    -- Single- and multi-row queries translate plain fields only; aggregate
    -- queries translate aggregations (and possibly node fields) instead
    translateQueryDB ::
      QueryDB 'DataConnector Void (UnpreparedValue 'DataConnector) ->
      m API.QueryRequest
    translateQueryDB =
      \case
        QDBMultipleRows annSelect -> translateAnnSelectToQueryRequest (translateAnnFieldsWithNoAggregates sessionVariables noPrefix) annSelect
        QDBSingleRow annSelect -> translateAnnSelectToQueryRequest (translateAnnFieldsWithNoAggregates sessionVariables noPrefix) annSelect
        QDBAggregation annSelect -> translateAnnSelectToQueryRequest (translateTableAggregateFields sessionVariables) annSelect

    -- Runs the translation, collecting the table relationships written out
    -- along the way and attaching them to the request
    translateAnnSelectToQueryRequest ::
      (API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT TableRelationships m FieldsAndAggregates) ->
      AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
      m API.QueryRequest
    translateAnnSelectToQueryRequest translateFieldsAndAggregates selectG = do
      tableName <- extractTableName selectG
      (query, (TableRelationships tableRelationships)) <- CPS.runWriterT (translateAnnSelect sessionVariables translateFieldsAndAggregates tableName selectG)
      let apiTableRelationships = uncurry API.TableRelationships <$> HashMap.toList tableRelationships
      pure $
        API.QueryRequest
          { _qrTable = tableName,
            _qrTableRelationships = apiTableRelationships,
            _qrQuery = query
          }
-- | Extract the target table name from a select's source, rejecting the
-- selection sources that the Data Connector backend does not support.
extractTableName :: MonadError QErr m => AnnSelectG 'DataConnector fieldsType valueType -> m API.TableName
extractTableName AnnSelectG {_asnFrom = selectFrom} =
  case selectFrom of
    FromTable tableName -> pure $ Witch.from tableName
    FromIdentifier _identifier -> throw400 NotSupported "AnnSelectG: FromIdentifier not supported"
    FromFunction {} -> throw400 NotSupported "AnnSelectG: FromFunction not supported"
-- | Translate a select into an 'API.Query': fields/aggregates, a where clause
-- combining the user's filter with the permission filter, limit/offset, and
-- order by. Relationships encountered during translation are written out.
translateAnnSelect ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  (API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT writerOutput m FieldsAndAggregates) ->
  API.TableName ->
  AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m API.Query
translateAnnSelect sessionVariables translateFieldsAndAggregates tableName selectG = do
  FieldsAndAggregates {..} <- translateFieldsAndAggregates tableName (_asnFields selectG)
  -- The permission filter is always applied, AND-ed with the user's where
  -- clause when one was supplied
  let whereClauseWithPermissions =
        case _saWhere (_asnArgs selectG) of
          Just expr -> BoolAnd [expr, _tpFilter (_asnPerm selectG)]
          Nothing -> _tpFilter (_asnPerm selectG)
  whereClause <- translateBoolExpToExpression sessionVariables tableName whereClauseWithPermissions
  orderBy <- traverse (translateOrderBy sessionVariables tableName) (_saOrderBy $ _asnArgs selectG)
  pure
    API.Query
      { _qFields = mapFieldNameHashMap _faaFields,
        _qAggregates = mapFieldNameHashMap _faaAggregates,
        -- The effective limit is the minimum of the user-specified limit and
        -- the permission limit, when either is present
        _qLimit =
          fmap getMin $
            foldMap
              (fmap Min)
              [ _saLimit (_asnArgs selectG),
                _tpLimit (_asnPerm selectG)
              ],
        _qOffset = fmap fromIntegral (_saOffset (_asnArgs selectG)),
        _qWhere = whereClause,
        _qOrderBy = orderBy
      }
-- | Translate the order-by items of a select into an 'API.OrderBy', merging
-- the per-item relationship trees into a single tree (see
-- 'mergeOrderByRelations').
translateOrderBy ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  NE.NonEmpty (AnnotatedOrderByItemG 'DataConnector (UnpreparedValue 'DataConnector)) ->
  CPS.WriterT writerOutput m API.OrderBy
translateOrderBy sessionVariables sourceTableName orderByItems = do
  orderByElementsAndRelations <- for orderByItems \OrderByItemG {..} -> do
    -- Ascending is the default when no direction was specified
    let orderDirection = maybe API.Ascending Witch.from obiType
    translateOrderByElement sessionVariables sourceTableName orderDirection [] obiColumn
  relations <- lift . mergeOrderByRelations $ snd <$> orderByElementsAndRelations
  pure
    API.OrderBy
      { _obRelations = relations,
        _obElements = fst <$> orderByElementsAndRelations
      }
-- | Translate one order-by element, producing both the 'API.OrderByElement'
-- and the tree of relationships it traverses (keyed by relationship name).
-- 'targetReversePath' accumulates the relationship path in reverse as we
-- recurse through object relationships; it is reversed when emitted.
translateOrderByElement ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  API.OrderDirection ->
  [API.RelationshipName] ->
  AnnotatedOrderByElement 'DataConnector (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m (API.OrderByElement, HashMap API.RelationshipName API.OrderByRelation)
translateOrderByElement sessionVariables sourceTableName orderDirection targetReversePath = \case
  -- Ordering by a column on the current target table: no relationships needed
  AOCColumn (ColumnInfo {..}) ->
    pure
      ( API.OrderByElement
          { _obeTargetPath = reverse targetReversePath,
            _obeTarget = API.OrderByColumn $ Witch.from ciColumn,
            _obeOrderDirection = orderDirection
          },
        mempty
      )
  -- Ordering by a column across an object relationship: record the
  -- relationship and recurse into the target table
  AOCObjectRelation relationshipInfo filterExp orderByElement -> do
    (relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
    (translatedOrderByElement, subOrderByRelations) <- translateOrderByElement sessionVariables _rTargetTable orderDirection (relationshipName : targetReversePath) orderByElement
    targetTableWhereExp <- translateBoolExpToExpression sessionVariables _rTargetTable filterExp
    let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp subOrderByRelations)]
    pure (translatedOrderByElement, orderByRelations)
  -- Ordering by an aggregate over an array relationship: the aggregate is the
  -- order-by target and terminates the relationship path
  AOCArrayAggregation relationshipInfo filterExp aggregateOrderByElement -> do
    (relationshipName, API.Relationship {..}) <- recordTableRelationshipFromRelInfo sourceTableName relationshipInfo
    orderByTarget <- case aggregateOrderByElement of
      AAOCount ->
        pure API.OrderByStarCountAggregate
      AAOOp aggFunctionTxt ColumnInfo {..} -> do
        aggFunction <- lift $ translateSingleColumnAggregateFunction aggFunctionTxt
        pure . API.OrderBySingleColumnAggregate $ API.SingleColumnAggregate aggFunction $ Witch.from ciColumn
    let translatedOrderByElement =
          API.OrderByElement
            { _obeTargetPath = reverse (relationshipName : targetReversePath),
              _obeTarget = orderByTarget,
              _obeOrderDirection = orderDirection
            }
    targetTableWhereExp <- translateBoolExpToExpression sessionVariables _rTargetTable filterExp
    let orderByRelations = HashMap.fromList [(relationshipName, API.OrderByRelation targetTableWhereExp mempty)]
    pure (translatedOrderByElement, orderByRelations)
-- | Merge the relationship trees produced per order-by element into one tree.
-- Two entries for the same relationship can only be merged if their filter
-- expressions agree; differing filters for the same table are a 500 error.
mergeOrderByRelations ::
  forall m f.
  (MonadError QErr m, Foldable f) =>
  f (HashMap API.RelationshipName API.OrderByRelation) ->
  m (HashMap API.RelationshipName API.OrderByRelation)
mergeOrderByRelations orderByRelationsList =
  foldM mergeMap mempty orderByRelationsList
  where
    mergeMap :: HashMap API.RelationshipName API.OrderByRelation -> HashMap API.RelationshipName API.OrderByRelation -> m (HashMap API.RelationshipName API.OrderByRelation)
    mergeMap left right = foldM (\targetMap (relName, orderByRel) -> HashMap.alterF (maybe (pure $ Just orderByRel) (fmap Just . mergeOrderByRelation orderByRel)) relName targetMap) left $ HashMap.toList right

    -- NOTE: the parameter order here is (right, left) because the partial
    -- application above supplies the right-hand map's value first; the merge
    -- itself is symmetric once the where clauses are known to match
    mergeOrderByRelation :: API.OrderByRelation -> API.OrderByRelation -> m API.OrderByRelation
    mergeOrderByRelation right left =
      if API._obrWhere left == API._obrWhere right
        then do
          mergedSubrelations <- mergeMap (API._obrSubrelations left) (API._obrSubrelations right)
          pure $ API.OrderByRelation (API._obrWhere left) mergedSubrelations
        else throw500 "mergeOrderByRelations: Differing filter expressions found for the same table"
-- | Translate plain (non-aggregate) fields and wrap them in a
-- 'FieldsAndAggregates' with an empty aggregates map, for use where both are
-- expected.
translateAnnFieldsWithNoAggregates ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  FieldPrefix ->
  API.TableName ->
  AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m FieldsAndAggregates
translateAnnFieldsWithNoAggregates sessionVariables fieldNamePrefix sourceTableName fields = do
  translatedFields <- translateAnnFields sessionVariables fieldNamePrefix sourceTableName fields
  pure $ FieldsAndAggregates translatedFields mempty
-- | Translate a list of selected fields into API fields keyed by their
-- (prefixed) field name. Fields that translate to 'Nothing' (literal
-- expression fields) are dropped; they are re-added during response reshaping.
translateAnnFields ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  FieldPrefix ->
  API.TableName ->
  AnnFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m (HashMap FieldName API.Field)
translateAnnFields sessionVariables fieldNamePrefix sourceTableName fields = do
  translatedFields <- traverse (traverse (translateAnnField sessionVariables sourceTableName)) fields
  pure $ HashMap.fromList (mapMaybe (\(fieldName, field) -> (applyPrefix fieldNamePrefix fieldName,) <$> field) translatedFields)
-- | Translate a single selected field. Column fields become API column
-- fields; relationship fields record an 'API.Relationship' (via the writer)
-- and become relationship fields carrying a subquery; literal expression
-- fields translate to 'Nothing' and are re-added during response reshaping.
translateAnnField ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  AnnFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m (Maybe API.Field)
translateAnnField sessionVariables sourceTableName = \case
  AFColumn colField ->
    -- TODO: make sure certain fields in colField are not in use, since we don't
    -- support them
    pure . Just $ API.ColumnField (Witch.from $ _acfColumn colField) (Witch.from . columnTypeToScalarType $ _acfType colField)
  AFObjectRelation objRel -> do
    let targetTable = Witch.from $ _aosTableFrom (_aarAnnSelect objRel)
    let relationshipName = mkRelationshipName $ _aarRelationshipName objRel
    fields <- translateAnnFields sessionVariables noPrefix targetTable (_aosFields (_aarAnnSelect objRel))
    -- The target table's permission filter becomes the subquery's where clause
    whereClause <- translateBoolExpToExpression sessionVariables targetTable (_aosTableFilter (_aarAnnSelect objRel))
    recordTableRelationship
      sourceTableName
      relationshipName
      API.Relationship
        { _rTargetTable = targetTable,
          _rRelationshipType = API.ObjectRelationship,
          _rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping objRel)
        }
    pure . Just . API.RelField $
      API.RelationshipField
        relationshipName
        ( API.Query
            { _qFields = mapFieldNameHashMap fields,
              _qAggregates = mempty,
              _qWhere = whereClause,
              _qLimit = Nothing,
              _qOffset = Nothing,
              _qOrderBy = Nothing
            }
        )
  AFArrayRelation (ASSimple arrayRelationSelect) -> do
    Just <$> translateArrayRelationSelect sessionVariables sourceTableName (translateAnnFieldsWithNoAggregates sessionVariables noPrefix) arrayRelationSelect
  AFArrayRelation (ASAggregate arrayRelationSelect) ->
    Just <$> translateArrayRelationSelect sessionVariables sourceTableName (translateTableAggregateFields sessionVariables) arrayRelationSelect
  AFExpression _literal ->
    -- We ignore literal text fields (we don't send them to the data connector agent)
    -- and add them back to the response JSON when we reshape what the agent returns
    -- to us
    pure Nothing
-- | Translate an array-relationship select into a relationship field: records
-- the 'API.Relationship' (via the writer) and translates the nested select
-- into the subquery carried by the field.
translateArrayRelationSelect ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  (API.TableName -> Fields (fieldType (UnpreparedValue 'DataConnector)) -> CPS.WriterT writerOutput m FieldsAndAggregates) ->
  AnnRelationSelectG 'DataConnector (AnnSelectG 'DataConnector fieldType (UnpreparedValue 'DataConnector)) ->
  CPS.WriterT writerOutput m API.Field
translateArrayRelationSelect sessionVariables sourceTableName translateFieldsAndAggregates arrRel = do
  targetTable <- lift $ extractTableName (_aarAnnSelect arrRel)
  query <- translateAnnSelect sessionVariables translateFieldsAndAggregates targetTable (_aarAnnSelect arrRel)
  let relationshipName = mkRelationshipName $ _aarRelationshipName arrRel
  recordTableRelationship
    sourceTableName
    relationshipName
    API.Relationship
      { _rTargetTable = targetTable,
        _rRelationshipType = API.ArrayRelationship,
        _rColumnMapping = HashMap.fromList $ bimap Witch.from Witch.from <$> HashMap.toList (_aarColumnMapping arrRel)
      }
  pure . API.RelField $
    API.RelationshipField
      relationshipName
      query
-- | Translate all the fields of an aggregate select, combining the resulting
-- fields and aggregates with the 'FieldsAndAggregates' monoid.
translateTableAggregateFields ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  TableAggregateFieldsG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m FieldsAndAggregates
translateTableAggregateFields sessionVariables sourceTableName fields = do
  mconcat <$> traverse (uncurry (translateTableAggregateField sessionVariables sourceTableName)) fields
-- | Translate one field of an aggregate select: an aggregations sub-object, a
-- nodes sub-object of plain fields, or an ignored literal expression field.
-- Sub-field names are prefixed with the parent field name to keep the flat
-- API field maps unambiguous.
translateTableAggregateField ::
  ( Has TableRelationships writerOutput,
    Monoid writerOutput,
    MonadError QErr m
  ) =>
  SessionVariables ->
  API.TableName ->
  FieldName ->
  TableAggregateFieldG 'DataConnector Void (UnpreparedValue 'DataConnector) ->
  CPS.WriterT writerOutput m FieldsAndAggregates
translateTableAggregateField sessionVariables sourceTableName fieldName = \case
  TAFAgg aggregateFields -> do
    let fieldNamePrefix = prefixWith fieldName
    translatedAggregateFields <- lift $ mconcat <$> traverse (uncurry (translateAggregateField fieldNamePrefix)) aggregateFields
    pure $
      FieldsAndAggregates
        mempty
        translatedAggregateFields
  TAFNodes _ fields ->
    translateAnnFieldsWithNoAggregates sessionVariables (prefixWith fieldName) sourceTableName fields
  TAFExp _txt ->
    -- We ignore literal text fields (we don't send them to the data connector agent)
    -- and add them back to the response JSON when we reshape what the agent returns
    -- to us
    pure mempty
-- | Translate one aggregate field (count, single-column operation, or ignored
-- literal expression) into API aggregates keyed by their prefixed field name.
-- Operation fields produce one aggregate per selected column, prefixed
-- additionally with the operation's field name.
translateAggregateField ::
  MonadError QErr m =>
  FieldPrefix ->
  FieldName ->
  AggregateField 'DataConnector ->
  m (HashMap FieldName API.Aggregate)
translateAggregateField fieldPrefix fieldName = \case
  AFCount countAggregate ->
    let aggregate =
          case countAggregate of
            StarCount -> API.StarCount
            ColumnCount column -> API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = False}
            ColumnDistinctCount column -> API.ColumnCount $ API.ColumnCountAggregate {_ccaColumn = Witch.from column, _ccaDistinct = True}
     in pure $ HashMap.singleton (applyPrefix fieldPrefix fieldName) aggregate
  AFOp AggregateOp {..} -> do
    let fieldPrefix' = fieldPrefix <> prefixWith fieldName
    aggFunction <- translateSingleColumnAggregateFunction _aoOp
    fmap (HashMap.fromList . catMaybes) . forM _aoFields $ \(columnFieldName, columnField) ->
      case columnField of
        CFCol column _columnType ->
          pure . Just $ (applyPrefix fieldPrefix' columnFieldName, API.SingleColumn . API.SingleColumnAggregate aggFunction $ Witch.from column)
        CFExp _txt ->
          -- We ignore literal text fields (we don't send them to the data connector agent)
          -- and add them back to the response JSON when we reshape what the agent returns
          -- to us
          pure Nothing
  AFExp _txt ->
    -- We ignore literal text fields (we don't send them to the data connector agent)
    -- and add them back to the response JSON when we reshape what the agent returns
    -- to us
    pure mempty
-- | Translate an aggregate function name from the IR into an
-- 'API.SingleColumnAggregateFunction', failing with a 500 when the name is
-- not a valid GraphQL name.
translateSingleColumnAggregateFunction :: MonadError QErr m => Text -> m API.SingleColumnAggregateFunction
translateSingleColumnAggregateFunction functionName =
  case G.mkName functionName of
    Just name -> pure $ API.SingleColumnAggregateFunction name
    Nothing -> throw500 ("translateSingleColumnAggregateFunction: Invalid aggregate function encountered: " <> functionName)
--------------------------------------------------------------------------------
-- | Validate if a 'API.QueryRequest' contains any relationships.
queryHasRelations :: API.QueryRequest -> Bool
queryHasRelations request = API._qrTableRelationships request /= mempty
--------------------------------------------------------------------------------
-- | Reshape the agent's query response into the GraphQL response shape
-- dictated by the query IR term the request was translated from.
reshapeResponseToQueryShape ::
  MonadError QErr m =>
  QueryDB 'DataConnector Void v ->
  API.QueryResponse ->
  m J.Encoding
reshapeResponseToQueryShape queryDb response =
  case queryDb of
    QDBMultipleRows simpleSelect -> reshapeSimpleSelectRows Many (_asnFields simpleSelect) response
    QDBSingleRow simpleSelect -> reshapeSimpleSelectRows Single (_asnFields simpleSelect) response
    QDBAggregation aggregateSelect -> reshapeTableAggregateFields (_asnFields aggregateSelect) response
-- | Reshape the rows of a query response. With 'Single' cardinality, no rows
-- encode as JSON null, one row as an object, and more than one row is a 500
-- error; with 'Many', the rows encode as a JSON array. A missing '_qrRows' is
-- treated as no rows.
reshapeSimpleSelectRows ::
  MonadError QErr m =>
  Cardinality ->
  AnnFieldsG 'DataConnector Void v ->
  API.QueryResponse ->
  m J.Encoding
reshapeSimpleSelectRows cardinality fields API.QueryResponse {..} =
  case cardinality of
    Single ->
      case rows of
        [] -> pure $ J.toEncoding J.Null
        [singleRow] -> reshapeAnnFields noPrefix fields singleRow
        _multipleRows ->
          throw500 "Data Connector agent returned multiple rows when only one was expected" -- TODO(dchambers): Add pathing information for error clarity
    Many -> do
      reshapedRows <- traverse (reshapeAnnFields noPrefix fields) rows
      pure $ JE.list id reshapedRows
  where
    rows = fromMaybe mempty _qrRows
-- | Reshape an aggregate query response into a JSON object: aggregation
-- sub-objects are rebuilt from the response's aggregates, node sub-objects
-- from its rows, and literal expression fields are re-added here (they were
-- never sent to the agent). Lookups use the same field-name prefixes applied
-- during translation.
reshapeTableAggregateFields ::
  MonadError QErr m =>
  TableAggregateFieldsG 'DataConnector Void v ->
  API.QueryResponse ->
  m J.Encoding
reshapeTableAggregateFields tableAggregateFields API.QueryResponse {..} = do
  reshapedFields <- forM tableAggregateFields $ \(fieldName@(FieldName fieldNameText), tableAggregateField) -> do
    case tableAggregateField of
      TAFAgg aggregateFields -> do
        reshapedAggregateFields <- reshapeAggregateFields (prefixWith fieldName) aggregateFields responseAggregates
        pure $ (fieldNameText, reshapedAggregateFields)
      TAFNodes _ annFields -> do
        reshapedRows <- traverse (reshapeAnnFields (prefixWith fieldName) annFields) responseRows
        pure $ (fieldNameText, JE.list id reshapedRows)
      TAFExp txt ->
        pure $ (fieldNameText, JE.text txt)
  pure $ encodeAssocListAsObject reshapedFields
  where
    responseRows = fromMaybe mempty _qrRows
    responseAggregates = fromMaybe mempty _qrAggregates
-- | Reshape the aggregates sub-object of an aggregate query response. Each
-- requested aggregate is looked up in the agent's flat aggregates map using
-- the prefixed name it was translated with; a missing aggregate is a 500
-- error. Literal expression fields are re-added here without a lookup.
reshapeAggregateFields ::
  MonadError QErr m =>
  FieldPrefix ->
  AggregateFields 'DataConnector ->
  HashMap API.FieldName J.Value ->
  m J.Encoding
reshapeAggregateFields fieldPrefix aggregateFields responseAggregates = do
  reshapedFields <- forM aggregateFields $ \(fieldName@(FieldName fieldNameText), aggregateField) ->
    case aggregateField of
      AFCount _countAggregate -> do
        let fieldNameKey = API.FieldName . getFieldNameTxt $ applyPrefix fieldPrefix fieldName
        responseAggregateValue <-
          HashMap.lookup fieldNameKey responseAggregates
            `onNothing` throw500 ("Unable to find expected aggregate " <> API.unFieldName fieldNameKey <> " in aggregates returned by Data Connector agent") -- TODO(dchambers): Add pathing information for error clarity
        pure (fieldNameText, J.toEncoding responseAggregateValue)
      AFOp AggregateOp {..} -> do
        -- Operation aggregates were translated with an extra prefix (the
        -- operation's field name); apply the same prefix when looking them up
        reshapedColumnFields <- forM _aoFields $ \(columnFieldName@(FieldName columnFieldNameText), columnField) ->
          case columnField of
            CFCol _column _columnType -> do
              let fieldPrefix' = fieldPrefix <> prefixWith fieldName
              let columnFieldNameKey = API.FieldName . getFieldNameTxt $ applyPrefix fieldPrefix' columnFieldName
              responseAggregateValue <-
                HashMap.lookup columnFieldNameKey responseAggregates
                  `onNothing` throw500 ("Unable to find expected aggregate " <> API.unFieldName columnFieldNameKey <> " in aggregates returned by Data Connector agent") -- TODO(dchambers): Add pathing information for error clarity
              pure (columnFieldNameText, J.toEncoding responseAggregateValue)
            CFExp txt ->
              pure (columnFieldNameText, JE.text txt)
        pure (fieldNameText, encodeAssocListAsObject reshapedColumnFields)
      AFExp txt ->
        pure (fieldNameText, JE.text txt)
  pure $ encodeAssocListAsObject reshapedFields
-- | Reshape one response row into a JSON object, looking each requested field
-- up by the prefixed name it was translated with. The lookup itself is kept
-- lazy (inside the monad) so that fields never sent to the agent (literal
-- expressions) don't trigger a failing lookup (see 'reshapeField').
reshapeAnnFields ::
  MonadError QErr m =>
  FieldPrefix ->
  AnnFieldsG 'DataConnector Void v ->
  HashMap API.FieldName API.FieldValue ->
  m J.Encoding
reshapeAnnFields fieldNamePrefix fields responseRow = do
  reshapedFields <- forM fields $ \(fieldName@(FieldName fieldNameText), field) -> do
    let fieldNameKey = API.FieldName . getFieldNameTxt $ applyPrefix fieldNamePrefix fieldName
    let responseField =
          HashMap.lookup fieldNameKey responseRow
            `onNothing` throw500 ("Unable to find expected field " <> API.unFieldName fieldNameKey <> " in row returned by Data Connector agent") -- TODO(dchambers): Add pathing information for error clarity
    reshapedField <- reshapeField field responseField
    pure (fieldNameText, reshapedField)
  pure $ encodeAssocListAsObject reshapedFields
-- | Reshape a single field value from a response row. Column fields are
-- deserialized as scalar values; relationship fields carry a nested
-- 'API.QueryResponse' that is reshaped recursively. Literal expression fields
-- are emitted directly without forcing the (lazy) response lookup.
reshapeField ::
  MonadError QErr m =>
  AnnFieldG 'DataConnector Void v ->
  m API.FieldValue -> -- This lookup is lazy (behind the monad) so that we can _not_ do it when we've got an AFExpression
  m J.Encoding
reshapeField field responseFieldValue =
  case field of
    AFColumn _columnField -> do
      columnFieldValue <- API.deserializeAsColumnFieldValue <$> responseFieldValue
      pure $ J.toEncoding columnFieldValue
    AFObjectRelation objectRelationField -> do
      relationshipFieldValue <- API.deserializeAsRelationshipFieldValue <$> responseFieldValue
      case relationshipFieldValue of
        Left err -> throw500 $ "Found column field value where relationship field value was expected in field returned by Data Connector agent: " <> err -- TODO(dchambers): Add pathing information for error clarity
        Right subqueryResponse ->
          let fields = _aosFields $ _aarAnnSelect objectRelationField
           in reshapeSimpleSelectRows Single fields subqueryResponse
    AFArrayRelation (ASSimple simpleArrayRelationField) ->
      reshapeAnnRelationSelect (reshapeSimpleSelectRows Many) simpleArrayRelationField =<< responseFieldValue
    AFArrayRelation (ASAggregate aggregateArrayRelationField) ->
      reshapeAnnRelationSelect reshapeTableAggregateFields aggregateArrayRelationField =<< responseFieldValue
    AFExpression txt -> pure $ JE.text txt
-- | Reshape an array-relationship field value: deserialize it as a nested
-- 'API.QueryResponse' and reshape that response with the provided reshaping
-- function, using the relationship select's own fields.
reshapeAnnRelationSelect ::
  MonadError QErr m =>
  (Fields (fieldType v) -> API.QueryResponse -> m J.Encoding) ->
  AnnRelationSelectG 'DataConnector (AnnSelectG 'DataConnector fieldType v) ->
  API.FieldValue ->
  m J.Encoding
reshapeAnnRelationSelect reshapeFields annRelationSelect fieldValue =
  case API.deserializeAsRelationshipFieldValue fieldValue of
    Left err -> throw500 $ "Found column field value where relationship field value was expected in field returned by Data Connector agent: " <> err -- TODO(dchambers): Add pathing information for error clarity
    Right subqueryResponse ->
      let annSimpleSelect = _aarAnnSelect annRelationSelect
       in reshapeFields (_asnFields annSimpleSelect) subqueryResponse
--------------------------------------------------------------------------------
-- | Convert a map keyed by IR field names into one keyed by API field names,
-- yielding 'Nothing' when the map is empty so optional query properties can
-- be omitted from the request.
mapFieldNameHashMap :: Eq v => HashMap FieldName v -> Maybe (HashMap API.FieldName v)
mapFieldNameHashMap fieldsMap =
  memptyToNothing (HashMap.mapKeys (API.FieldName . getFieldNameTxt) fieldsMap)
-- | Collapse an empty ('mempty') value to 'Nothing'; wrap anything else in
-- 'Just'.
memptyToNothing :: (Monoid m, Eq m) => m -> Maybe m
memptyToNothing value
  | value == mempty = Nothing
  | otherwise = Just value

View File

@ -240,12 +240,13 @@ msDBMutationPlan ::
MonadReader QueryTagsComment m
) =>
UserInfo ->
Env.Environment ->
Options.StringifyNumbers ->
SourceName ->
SourceConfig 'MSSQL ->
MutationDB 'MSSQL Void (UnpreparedValue 'MSSQL) ->
m (DBStepInfo 'MSSQL)
msDBMutationPlan userInfo stringifyNum sourceName sourceConfig mrf = do
msDBMutationPlan userInfo _environment stringifyNum sourceName sourceConfig mrf = do
go <$> case mrf of
MDBInsert annInsert -> executeInsert userInfo stringifyNum sourceConfig annInsert
MDBDelete annDelete -> executeDelete userInfo stringifyNum sourceConfig annDelete

View File

@ -281,12 +281,13 @@ pgDBMutationPlan ::
MonadReader QueryTagsComment m
) =>
UserInfo ->
Env.Environment ->
Options.StringifyNumbers ->
SourceName ->
SourceConfig ('Postgres pgKind) ->
MutationDB ('Postgres pgKind) Void (UnpreparedValue ('Postgres pgKind)) ->
m (DBStepInfo ('Postgres pgKind))
pgDBMutationPlan userInfo stringifyNum sourceName sourceConfig mrf = do
pgDBMutationPlan userInfo _environment stringifyNum sourceName sourceConfig mrf = do
go <$> case mrf of
MDBInsert s -> convertInsert userInfo s stringifyNum
MDBUpdate s -> convertUpdate userInfo s stringifyNum

View File

@ -83,6 +83,7 @@ class
MonadReader QueryTagsComment m
) =>
UserInfo ->
Env.Environment ->
Options.StringifyNumbers ->
SourceName ->
SourceConfig b ->

View File

@ -129,7 +129,7 @@ convertMutationSelectionSet
let mutationQueryTagsAttributes = encodeQueryTags $ QTMutation $ MutationMetadata reqId maybeOperationName rootFieldName parameterizedQueryHash
queryTagsComment = Tagged.untag $ createQueryTags @m mutationQueryTagsAttributes queryTagsConfig
(noRelsDBAST, remoteJoins) = RJ.getRemoteJoinsMutationDB db
dbStepInfo <- flip runReaderT queryTagsComment $ mkDBMutationPlan @b userInfo stringifyNum sourceName sourceConfig noRelsDBAST
dbStepInfo <- flip runReaderT queryTagsComment $ mkDBMutationPlan @b userInfo env stringifyNum sourceName sourceConfig noRelsDBAST
pure $ ExecStepDB [] (AB.mkAnyBackend dbStepInfo) remoteJoins
RFRemote remoteField -> do
RemoteSchemaRootField remoteSchemaInfo resultCustomizer resolvedRemoteField <- runVariableCache $ resolveRemoteField userInfo remoteField

View File

@ -73,11 +73,12 @@ spec = do
let returningFields = [(FieldName "field", ColumnField (ColumnName "my_column") (ScalarType "string"))]
describe "InsertOperation" $ do
testToFromJSONToSchema
(InsertOperation (InsertMutationOperation (TableName ["my_table"]) [] returningFields))
(InsertOperation (InsertMutationOperation (TableName ["my_table"]) [] (Just $ And []) returningFields))
[aesonQQ|
{ "type": "insert",
"table": ["my_table"],
"rows": [],
"post_insert_check": { "type": "and", "expressions": [] },
"returning_fields": {
"field": {
"type": "column",
@ -89,12 +90,13 @@ spec = do
|]
describe "UpdateOperation" $ do
testToFromJSONToSchema
(UpdateOperation (UpdateMutationOperation (TableName ["my_table"]) (Just $ And []) [] returningFields))
(UpdateOperation (UpdateMutationOperation (TableName ["my_table"]) (Just $ And []) [] (Just $ And []) returningFields))
[aesonQQ|
{ "type": "update",
"table": ["my_table"],
"where": { "type": "and", "expressions": [] },
"updates": [],
"post_update_check": { "type": "and", "expressions": [] },
"returning_fields": {
"field": {
"type": "column",
@ -272,6 +274,7 @@ genInsertMutationOperation =
InsertMutationOperation
<$> genTableName
<*> Gen.list defaultRange genRowObject
<*> Gen.maybe genExpression
<*> genFieldMap genField
genRowObject :: Gen RowObject
@ -292,6 +295,7 @@ genUpdateMutationOperation =
<$> genTableName
<*> Gen.maybe genExpression
<*> Gen.list defaultRange genRowUpdate
<*> Gen.maybe genExpression
<*> genFieldMap genField
genRowUpdate :: (MonadGen m, GenBase m ~ Identity) => m RowUpdate