Raw Query Support for Data Connectors - GDW-394

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/5890
Co-authored-by: Vijay Prasanna <11921040+vijayprasanna13@users.noreply.github.com>
GitOrigin-RevId: f6bd2ed5fe170bcce262564cf4f45c95c9bdff94
This commit is contained in:
Lyndon Maydwell 2022-09-23 07:07:54 +10:00 committed by hasura-bot
parent e0c0d7b73f
commit 4d2e37b3e6
29 changed files with 528 additions and 63 deletions

View File

@ -36,6 +36,13 @@ import { services } from '../../../../dataSources/services';
import { isFeatureSupported, setDriver } from '../../../../dataSources';
import { fetchDataInit, UPDATE_CURRENT_DATA_SOURCE } from '../DataActions';
import { unsupportedRawSQLDrivers } from './utils';
import { nativeDrivers } from '@/features/DataSource';
import { useRunSQL } from './hooks/useRunSQL';
import { useFireNotification } from '@/new-components/Notifications';
import {
availableFeatureFlagIds,
useIsFeatureFlagEnabled,
} from '@/features/FeatureFlags';
const checkChangeLang = (sql, selectedDriver) => {
return (
@ -80,9 +87,24 @@ const RawSQL = ({
isTableTrackChecked,
migrationMode,
allSchemas,
sources,
// sources,
currentDataSource,
metadataSources,
}) => {
const { enabled: areGDCFeaturesEnabled } = useIsFeatureFlagEnabled(
availableFeatureFlagIds.gdcId
);
const { fireNotification } = useFireNotification();
const { fetchRunSQLResult, data, isLoading } = useRunSQL({
onError: err => {
fireNotification({
type: 'error',
title: 'failed to run SQL statement',
message: err?.message,
});
},
});
const [statementTimeout, setStatementTimeout] = useState(
Number(getLSItem(LS_KEYS.rawSqlStatementTimeout)) || 10
);
@ -94,14 +116,25 @@ const RawSQL = ({
const [suggestLangChange, setSuggestLangChange] = useState(false);
useEffect(() => {
const driver = getSourceDriver(sources, selectedDatabase);
const driver = getSourceDriver(metadataSources, selectedDatabase);
setSelectedDriver(driver);
if (!nativeDrivers.includes(driver)) {
setStatementTimeout(null);
return;
}
if (!isFeatureSupported('rawSQL.statementTimeout'))
setStatementTimeout(null);
}, [selectedDatabase, sources]);
}, [selectedDatabase, metadataSources]);
const dropDownSelectorValueChange = value => {
const driver = getSourceDriver(sources, value);
const driver = getSourceDriver(metadataSources, value);
if (!nativeDrivers.includes(driver)) {
setSelectedDatabase(value);
return;
}
dispatch({
type: UPDATE_CURRENT_DATA_SOURCE,
source: value,
@ -134,6 +167,15 @@ const RawSQL = ({
}, [sql, selectedDriver]);
const submitSQL = () => {
if (!nativeDrivers.includes(selectedDriver)) {
fetchRunSQLResult({
driver: selectedDriver,
dataSourceName: selectedDatabase,
sql: sqlText,
});
return;
}
if (!sqlText) {
setLSItem(LS_KEYS.rawSQLKey, '');
return;
@ -451,10 +493,15 @@ const RawSQL = ({
<b>Database</b>
</label>{' '}
<DropDownSelector
options={sources.map(source => ({
name: source.name,
driver: source.driver,
}))}
options={metadataSources
.filter(source => {
if (areGDCFeaturesEnabled) return source;
return nativeDrivers.includes(source.kind);
})
.map(source => ({
name: source.name,
driver: source.kind,
}))}
defaultValue={currentDataSource}
onChange={dropDownSelectorValueChange}
/>
@ -493,6 +540,7 @@ const RawSQL = ({
mode="primary"
data-test="run-sql"
disabled={!sqlText.length}
isLoading={isLoading}
>
Run!
</Button>
@ -514,14 +562,22 @@ const RawSQL = ({
{getMigrationWarningModal()}
<div className={styles.add_mar_bottom}>
{resultType &&
resultType !== 'command' &&
result &&
result?.length > 0 && (
<ResultTable rows={result} headers={resultHeaders} />
{nativeDrivers.includes(selectedDriver) ? (
<div className={styles.add_mar_bottom}>
{resultType &&
resultType !== 'command' &&
result &&
result?.length > 0 && (
<ResultTable rows={result} headers={resultHeaders} />
)}
</div>
) : (
<div className={styles.add_mar_bottom}>
{data && data.result.length > 0 && (
<ResultTable rows={data.result.slice(1)} headers={data.result[0]} />
)}
</div>
</div>
)}
</div>
);
};
@ -552,6 +608,7 @@ const mapStateToProps = state => ({
serverVersion: state.main.serverVersion ? state.main.serverVersion : '',
sources: getDataSources(state),
currentDataSource: state.tables.currentDataSource,
metadataSources: state.metadata.metadataObject.sources,
});
const rawSQLConnector = connect => connect(mapStateToProps)(RawSQL);

View File

@ -0,0 +1,56 @@
// eslint-disable-next-line no-restricted-imports
import { runSQL, RunSQLResponse } from '@/features/DataSource/api';
import { SupportedDrivers } from '@/features/MetadataAPI';
import { useHttpClient } from '@/features/Network';
import { useCallback, useState } from 'react';
/**
* This run SQL hook is the new implementation of the run sql api using react hooks. Right now, it's used only
* for gdc based sources since the old rawSQL.js UI needs a rewrite and decoupling from redux
*/
/**
 * Runs a raw SQL statement via the new run-sql API and exposes the result,
 * loading flag and error as React state. Currently used only for gdc based
 * sources since the old rawSQL.js UI needs a rewrite and decoupling from redux.
 */
export const useRunSQL = (props: { onError?: (err: unknown) => void }) => {
  const httpClient = useHttpClient();
  const [data, setData] = useState<RunSQLResponse | undefined>();
  const [isLoading, setIsLoading] = useState(false);
  const [error, setError] = useState<Error | undefined>();

  // Destructure so the callback depends on the handler itself, not the
  // `props` object identity (which changes on every render).
  const { onError } = props;

  const fetchRunSQLResult = useCallback(
    async ({
      driver,
      dataSourceName,
      sql,
    }: {
      dataSourceName: string;
      driver: SupportedDrivers;
      sql: string;
    }) => {
      // Clear previous result AND previous error so a new run never shows
      // stale state from an earlier request.
      setData(undefined);
      setError(undefined);
      setIsLoading(true);
      try {
        const result = await runSQL({
          httpClient,
          source: {
            kind: driver,
            name: dataSourceName,
          },
          sql,
        });
        setData(result);
      } catch (err) {
        setError(err as Error);
        onError?.(err);
      } finally {
        // Single place to drop the loading flag for both success and failure
        // (the success path previously cleared it redundantly inside `try`).
        setIsLoading(false);
      }
    },
    // Include onError: omitting it kept a stale closure over the first
    // handler ever passed in.
    [httpClient, onError]
  );

  return { fetchRunSQLResult, data, isLoading, error };
};

View File

@ -61,10 +61,7 @@ export const runSQL = async ({
sql,
httpClient,
}: RunSqlArgs & NetworkArgs): Promise<RunSQLResponse> => {
if (source.kind === 'gdc') throw Error('GDC does not support run sql');
const type = getRunSqlType(source.kind);
const result = await runQuery<RunSQLResponse>({
httpClient,
body: {

View File

@ -1,6 +1,6 @@
{
"name": "@hasura/dc-api-types",
"version": "0.7.0",
"version": "0.8.0",
"description": "Hasura GraphQL Engine Data Connector Agent API types",
"author": "Hasura (https://github.com/hasura/graphql-engine)",
"license": "Apache-2.0",

View File

@ -203,6 +203,54 @@
}
}
}
},
"/raw": {
"post": {
"parameters": [
{
"in": "header",
"name": "X-Hasura-DataConnector-SourceName",
"required": true,
"schema": {
"type": "string"
}
},
{
"in": "header",
"name": "X-Hasura-DataConnector-Config",
"required": true,
"schema": {
"additionalProperties": true,
"nullable": false,
"type": "object"
}
}
],
"requestBody": {
"content": {
"application/json;charset=utf-8": {
"schema": {
"$ref": "#/components/schemas/RawRequest"
}
}
}
},
"responses": {
"200": {
"content": {
"application/json;charset=utf-8": {
"schema": {
"$ref": "#/components/schemas/RawResponse"
}
}
},
"description": ""
},
"400": {
"description": "Invalid `body` or `X-Hasura-DataConnector-Config` or `X-Hasura-DataConnector-SourceName`"
}
}
}
}
},
"components": {
@ -243,6 +291,9 @@
"queries": {
"$ref": "#/components/schemas/QueryCapabilities"
},
"raw": {
"$ref": "#/components/schemas/RawCapabilities"
},
"relationships": {
"$ref": "#/components/schemas/RelationshipCapabilities"
},
@ -317,6 +368,7 @@
},
"MetricsCapabilities": {},
"ExplainCapabilities": {},
"RawCapabilities": {},
"ConfigSchemaResponse": {
"nullable": false,
"properties": {
@ -1580,6 +1632,36 @@
"query"
],
"type": "object"
},
"RawResponse": {
"properties": {
"rows": {
"description": "The rows returned by the raw query.",
"items": {
"additionalProperties": {
"additionalProperties": true
},
"type": "object"
},
"type": "array"
}
},
"required": [
"rows"
],
"type": "object"
},
"RawRequest": {
"properties": {
"query": {
"description": "A string representing a raw query",
"type": "string"
}
},
"required": [
"query"
],
"type": "object"
}
}
}

View File

@ -51,6 +51,9 @@ export type { Query } from './models/Query';
export type { QueryCapabilities } from './models/QueryCapabilities';
export type { QueryRequest } from './models/QueryRequest';
export type { QueryResponse } from './models/QueryResponse';
export type { RawCapabilities } from './models/RawCapabilities';
export type { RawRequest } from './models/RawRequest';
export type { RawResponse } from './models/RawResponse';
export type { RelatedTable } from './models/RelatedTable';
export type { Relationship } from './models/Relationship';
export type { RelationshipCapabilities } from './models/RelationshipCapabilities';

View File

@ -8,6 +8,7 @@ import type { GraphQLTypeDefinitions } from './GraphQLTypeDefinitions';
import type { MetricsCapabilities } from './MetricsCapabilities';
import type { MutationCapabilities } from './MutationCapabilities';
import type { QueryCapabilities } from './QueryCapabilities';
import type { RawCapabilities } from './RawCapabilities';
import type { RelationshipCapabilities } from './RelationshipCapabilities';
import type { ScalarTypesCapabilities } from './ScalarTypesCapabilities';
import type { SubscriptionCapabilities } from './SubscriptionCapabilities';
@ -19,6 +20,7 @@ export type Capabilities = {
metrics?: MetricsCapabilities;
mutations?: MutationCapabilities;
queries?: QueryCapabilities;
raw?: RawCapabilities;
relationships?: RelationshipCapabilities;
scalar_types?: ScalarTypesCapabilities;
subscriptions?: SubscriptionCapabilities;

View File

@ -0,0 +1,7 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
// Empty capability object matching the `RawCapabilities` schema in the agent
// OpenAPI spec; its presence in a capabilities response marks raw-query support.
export type RawCapabilities = {};

View File

@ -0,0 +1,11 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
// Body of a `POST /raw` call to a data connector agent.
export type RawRequest = {
  /** A string representing a raw query */
  query: string;
};

View File

@ -0,0 +1,11 @@
/* istanbul ignore file */
/* tslint:disable */
/* eslint-disable */
// Result of a `POST /raw` call: one JSON object per row, keyed by column name.
export type RawResponse = {
  /** The rows returned by the raw query. */
  rows: Record<string, any>[];
};

View File

@ -24,7 +24,7 @@
},
"dc-api-types": {
"name": "@hasura/dc-api-types",
"version": "0.7.0",
"version": "0.8.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",
@ -631,7 +631,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"fastify": "^3.29.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -1389,14 +1389,13 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"fastify": "^4.4.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
"openapi3-ts": "^2.0.2",
"pino-pretty": "^8.1.0",
"sequelize": "^6.21.2",
"sqlite": "^4.1.1",
"sqlite-parser": "^1.0.1",
"sqlite3": "^5.0.8",
"sqlstring-sqlite": "^0.1.1"
@ -2890,10 +2889,6 @@
"node": ">= 10.x"
}
},
"sqlite/node_modules/sqlite": {
"version": "4.1.1",
"license": "MIT"
},
"sqlite/node_modules/sqlite-parser": {
"version": "1.0.1",
"license": "MIT",
@ -3127,7 +3122,7 @@
"version": "file:reference",
"requires": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/xml2js": "^0.4.11",
@ -3618,7 +3613,7 @@
"version": "file:sqlite",
"requires": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"@tsconfig/node16": "^1.0.3",
"@types/node": "^16.11.49",
"@types/sqlite3": "^3.1.8",
@ -3629,7 +3624,6 @@
"openapi3-ts": "^2.0.2",
"pino-pretty": "^8.1.0",
"sequelize": "^6.21.2",
"sqlite": "^4.1.1",
"sqlite-parser": "^1.0.1",
"sqlite3": "^5.0.8",
"sqlstring-sqlite": "^0.1.1",
@ -4601,9 +4595,6 @@
"split2": {
"version": "4.1.0"
},
"sqlite": {
"version": "4.1.1"
},
"sqlite-parser": {
"version": "1.0.1"
},

View File

@ -10,7 +10,7 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"fastify": "^3.29.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",
@ -44,7 +44,7 @@
}
},
"node_modules/@hasura/dc-api-types": {
"version": "0.7.0",
"version": "0.8.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^7.0.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"fastify": "^3.29.0",
"mathjs": "^11.0.0",
"pino-pretty": "^8.0.0",

View File

@ -10,14 +10,13 @@
"license": "Apache-2.0",
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"fastify": "^4.4.0",
"fastify-metrics": "^9.2.1",
"nanoid": "^3.3.4",
"openapi3-ts": "^2.0.2",
"pino-pretty": "^8.1.0",
"sequelize": "^6.21.2",
"sqlite": "^4.1.1",
"sqlite-parser": "^1.0.1",
"sqlite3": "^5.0.8",
"sqlstring-sqlite": "^0.1.1"
@ -55,7 +54,7 @@
"license": "MIT"
},
"node_modules/@hasura/dc-api-types": {
"version": "0.7.0",
"version": "0.8.0",
"license": "Apache-2.0",
"devDependencies": {
"@tsconfig/node16": "^1.0.3",
@ -895,10 +894,6 @@
"@types/node": "*"
}
},
"node_modules/sqlite": {
"version": "4.1.1",
"license": "MIT"
},
"node_modules/sqlite-parser": {
"version": "1.0.1",
"license": "MIT",

View File

@ -22,7 +22,7 @@
},
"dependencies": {
"@fastify/cors": "^8.1.0",
"@hasura/dc-api-types": "0.7.0",
"@hasura/dc-api-types": "0.8.0",
"fastify-metrics": "^9.2.1",
"fastify": "^4.4.0",
"nanoid": "^3.3.4",
@ -30,7 +30,6 @@
"pino-pretty": "^8.1.0",
"sequelize": "^6.21.2",
"sqlite-parser": "^1.0.1",
"sqlite": "^4.1.1",
"sqlite3": "^5.0.8",
"sqlstring-sqlite": "^0.1.1"
},

View File

@ -15,6 +15,7 @@ export const capabilitiesResponse: CapabilitiesResponse = {
}
},
explain: {},
raw: {},
... ( envToBool('METRICS') ? { metrics: {} } : {} )
},
}

View File

@ -4,12 +4,13 @@ import { getSchema } from './schema';
import { explain, queryData } from './query';
import { getConfig, tryGetConfig } from './config';
import { capabilitiesResponse } from './capabilities';
import { QueryResponse, SchemaResponse, QueryRequest, CapabilitiesResponse, ExplainResponse } from '@hasura/dc-api-types';
import { QueryResponse, SchemaResponse, QueryRequest, CapabilitiesResponse, ExplainResponse, RawRequest, RawResponse } from '@hasura/dc-api-types';
import { connect } from './db';
import { envToBool, envToString } from './util';
import metrics from 'fastify-metrics';
import prometheus from 'prom-client';
import * as fs from 'fs'
import { runRawOperation } from './raw';
const port = Number(process.env.PORT) || 8100;
@ -107,6 +108,13 @@ server.post<{ Body: QueryRequest, Reply: QueryResponse }>("/query", async (reque
return result;
});
// TODO: Use derived types for body and reply
server.post<{ Body: RawRequest, Reply: RawResponse }>("/raw", async (request, _response) => {
server.log.info({ headers: request.headers, query: request.body, }, "schema.raw");
const config = getConfig(request);
return runRawOperation(config, sqlLogger, request.body);
});
server.post<{ Body: QueryRequest, Reply: ExplainResponse}>("/explain", async (request, _response) => {
server.log.info({ headers: request.headers, query: request.body, }, "query.request");
const config = getConfig(request);
@ -156,6 +164,7 @@ server.get("/", async (request, response) => {
<li><a href="/capabilities">GET /capabilities - Capabilities Metadata</a>
<li><a href="/schema">GET /schema - Agent Schema</a>
<li><a href="/query">POST /query - Query Handler</a>
<li><a href="/raw">POST /raw - Raw Query Handler</a>
<li><a href="/health">GET /health - Healthcheck</a>
<li><a href="/swagger.json">GET /swagger.json - Swagger JSON</a>
<li><a href="/metrics">GET /metrics - Prometheus formatted metrics</a>

View File

@ -0,0 +1,12 @@
import { Config } from "./config";
import { connect, SqlLogger } from './db';
import { RawRequest, RawResponse } from '@hasura/dc-api-types';
/**
 * Executes a raw query string against the connected database and returns
 * every result row as a JSON object keyed by column name.
 *
 * @param config    Agent source configuration used to open the connection.
 * @param sqlLogger Logger threaded through to the db layer for SQL tracing.
 * @param query     The RawRequest whose `query` field is executed verbatim.
 */
export async function runRawOperation(config: Config, sqlLogger: SqlLogger, query: RawRequest): Promise<RawResponse> {
  const db = connect(config, sqlLogger);
  // db.query resolves to [results, metadata]; the metadata is unused here,
  // so destructure only the rows (the original bound an unused `metadata`).
  const [results] = await db.query(query.query);
  return {
    rows: results as Array<Record<string, any>>,
  };
}

View File

@ -390,6 +390,7 @@ library dc-api
, Hasura.Backends.DataConnector.API.V0.Expression
, Hasura.Backends.DataConnector.API.V0.OrderBy
, Hasura.Backends.DataConnector.API.V0.Query
, Hasura.Backends.DataConnector.API.V0.Raw
, Hasura.Backends.DataConnector.API.V0.Explain
, Hasura.Backends.DataConnector.API.V0.Relationships
, Hasura.Backends.DataConnector.API.V0.Scalar
@ -586,6 +587,7 @@ library
, Hasura.Backends.DataConnector.Adapter.Backend
, Hasura.Backends.DataConnector.Adapter.Execute
, Hasura.Backends.DataConnector.Adapter.ConfigTransform
, Hasura.Backends.DataConnector.Adapter.RunSQL
, Hasura.Backends.DataConnector.Adapter.Metadata
, Hasura.Backends.DataConnector.Adapter.Schema
, Hasura.Backends.DataConnector.Adapter.SchemaCache

View File

@ -62,6 +62,13 @@ type HealthApi =
:> ConfigHeader Optional
:> GetNoContent
type RawApi =
"raw"
:> SourceNameHeader Required
:> ConfigHeader Required
:> ReqBody '[JSON] V0.RawRequest
:> Post '[JSON] V0.RawResponse
data Prometheus
-- NOTE: This seems like quite a brittle definition and we may want to be
@ -98,13 +105,15 @@ data Routes mode = Routes
-- | 'GET /health'
_health :: mode :- HealthApi,
-- | 'GET /metrics'
_metrics :: mode :- MetricsApi
_metrics :: mode :- MetricsApi,
-- | 'POST /raw'
_raw :: mode :- RawApi
}
deriving stock (Generic)
-- | servant-openapi3 does not (yet) support NamedRoutes so we need to compose the
-- API the old-fashioned way using :<|> for use by @toOpenApi@
type Api = CapabilitiesApi :<|> SchemaApi :<|> QueryApi :<|> ExplainApi :<|> HealthApi :<|> MetricsApi
type Api = CapabilitiesApi :<|> SchemaApi :<|> QueryApi :<|> ExplainApi :<|> HealthApi :<|> MetricsApi :<|> RawApi
-- | Provide an OpenApi 3.0 schema for the API
openApiSchema :: OpenApi

View File

@ -6,6 +6,7 @@ module Hasura.Backends.DataConnector.API.V0
module Expression,
module OrderBy,
module Query,
module Raw,
module Explain,
module Relationships,
module Scalar,
@ -22,6 +23,7 @@ import Hasura.Backends.DataConnector.API.V0.Explain as Explain
import Hasura.Backends.DataConnector.API.V0.Expression as Expression
import Hasura.Backends.DataConnector.API.V0.OrderBy as OrderBy
import Hasura.Backends.DataConnector.API.V0.Query as Query
import Hasura.Backends.DataConnector.API.V0.Raw as Raw
import Hasura.Backends.DataConnector.API.V0.Relationships as Relationships
import Hasura.Backends.DataConnector.API.V0.Scalar as Scalar
import Hasura.Backends.DataConnector.API.V0.Schema as Schema

View File

@ -17,6 +17,7 @@ module Hasura.Backends.DataConnector.API.V0.Capabilities
SubqueryComparisonCapabilities (..),
MetricsCapabilities (..),
ExplainCapabilities (..),
RawCapabilities (..),
CapabilitiesResponse (..),
emptyCapabilities,
lookupComparisonInputObjectDefinition,
@ -63,14 +64,15 @@ data Capabilities = Capabilities
_cRelationships :: Maybe RelationshipCapabilities,
_cComparisons :: Maybe ComparisonCapabilities,
_cMetrics :: Maybe MetricsCapabilities,
_cExplain :: Maybe ExplainCapabilities
_cExplain :: Maybe ExplainCapabilities,
_cRaw :: Maybe RawCapabilities
}
deriving stock (Eq, Show, Generic)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec Capabilities
emptyCapabilities :: Capabilities
emptyCapabilities = Capabilities Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
emptyCapabilities = Capabilities Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing Nothing
instance HasCodec Capabilities where
codec =
@ -85,6 +87,7 @@ instance HasCodec Capabilities where
<*> optionalField "comparisons" "The agent's comparison capabilities" .= _cComparisons
<*> optionalField "metrics" "The agent's metrics capabilities" .= _cMetrics
<*> optionalField "explain" "The agent's explain capabilities" .= _cExplain
<*> optionalField "raw" "The agent's raw query capabilities" .= _cRaw
data QueryCapabilities = QueryCapabilities
{ _qcSupportsPrimaryKeys :: Bool
@ -265,6 +268,15 @@ instance HasCodec ExplainCapabilities where
codec =
object "ExplainCapabilities" $ pure ExplainCapabilities
data RawCapabilities = RawCapabilities {}
deriving stock (Eq, Ord, Show, Generic, Data)
deriving anyclass (NFData, Hashable)
deriving (FromJSON, ToJSON, ToSchema) via Autodocodec RawCapabilities
instance HasCodec RawCapabilities where
codec =
object "RawCapabilities" $ pure RawCapabilities
data CapabilitiesResponse = CapabilitiesResponse
{ _crCapabilities :: Capabilities,
_crConfigSchemaResponse :: ConfigSchemaResponse

View File

@ -0,0 +1,52 @@
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE TemplateHaskell #-}

-- | Request and response types for the data connector agent's
-- @POST /raw@ endpoint, which executes a raw (driver-native) query string.
module Hasura.Backends.DataConnector.API.V0.Raw
  ( RawRequest (..),
    RawResponse (..),
    rrQuery,
    rrRows,
  )
where

import Autodocodec.Extended
import Autodocodec.OpenAPI ()
import Control.Lens.TH (makeLenses)
import Data.Aeson (FromJSON, ToJSON, Value)
import Data.Data (Data)
import Data.HashMap.Strict qualified as H
import Data.OpenApi (ToSchema)
import Data.Text (Text)
import GHC.Generics (Generic)
import Prelude

-- | A serializable request to retrieve structured data from some
-- source.
data RawRequest = RawRequest
  { -- | The raw query text, forwarded to the agent verbatim.
    _rrQuery :: Text
  }
  deriving stock (Eq, Ord, Show, Generic, Data)
  deriving (FromJSON, ToJSON, ToSchema) via Autodocodec RawRequest

-- JSON shape: @{"query": "<raw query string>"}@, with "query" required.
instance HasCodec RawRequest where
  codec =
    object "RawRequest" $
      RawRequest
        <$> requiredField "query" "A string representing a raw query" .= _rrQuery

-- | The resolved query response provided by the 'POST /raw'
-- endpoint encoded as a list of JSON objects.
data RawResponse = RawResponse
  { -- | One map per result row, keyed by column name.
    _rrRows :: [H.HashMap Text Value]
  }
  deriving stock (Eq, Ord, Show)
  deriving (ToJSON, FromJSON, ToSchema) via Autodocodec RawResponse

-- JSON shape: @{"rows": [{...}, ...]}@, with "rows" required.
instance HasCodec RawResponse where
  codec =
    named "RawResponse" . object "RawResponse" $
      RawResponse
        <$> requiredField "rows" "The rows returned by the raw query." .= _rrRows

-- Generate lenses ('rrQuery', 'rrRows') for the records above.
$(makeLenses ''RawRequest)

$(makeLenses ''RawResponse)

View File

@ -0,0 +1,114 @@
-- | Implements @<connector>_run_sql@ for data connector sources by
-- forwarding the raw SQL text to the agent's @POST /raw@ endpoint and
-- re-shaping the reply into the classic 'RunSQLRes' tuple format.
module Hasura.Backends.DataConnector.Adapter.RunSQL
  ( DataConnectorRunSQL (..),
    runSQL,
  )
where

import Data.Aeson qualified as J
import Data.HashMap.Strict qualified as H
import Data.Text.Extended (ToTxt (..))
import Hasura.Backends.DataConnector.API (RawRequest (..))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Backends.DataConnector.Adapter.Types (DataConnectorName (), SourceConfig (..))
import Hasura.Base.Error (Code (DataConnectorError), QErr (qeInternal), QErrExtra (ExtraInternal), err400)
import Hasura.EncJSON (EncJSON, encJFromJValue)
import Hasura.Prelude
import Hasura.RQL.DDL.Schema (RunSQLRes (..))
import Hasura.RQL.Types.Common (SourceName (), sourceNameToText)
import Hasura.RQL.Types.SchemaCache (askSourceConfig)
import Hasura.RQL.Types.SchemaCache.Build (CacheRWM, MetadataM)
import Hasura.SQL.Backend (BackendType (DataConnector))
import Servant.Client (mkClientEnv, runClientM, (//))
import Servant.Client.Generic (genericClient)
import Witch qualified

-- | Payload of a data-connector run-sql request: which source to run
-- against and the raw SQL text, forwarded to the agent verbatim.
data DataConnectorRunSQL = DataConnectorRunSQL
  { _dcSource :: SourceName,
    _dcSql :: Text
  }
  deriving (Show, Eq)

instance J.FromJSON DataConnectorRunSQL where
  parseJSON = J.withObject "DataConnectorRunSQL" $ \o -> do
    _dcSql <- o J..: "sql"
    _dcSource <- o J..: "source"
    do
      -- Throw errors on unsupported operations: the standard run_sql
      -- payload allows "cascade"/"read_only", but neither is implemented
      -- for data connectors, so reject them rather than silently ignore.
      cascade <- o J..:? "cascade"
      when (cascade == Just True) do
        fail "Cascade not supported for raw data connector queries"
      readOnly <- o J..:? "read_only"
      when (readOnly == Just True) do
        fail "Read-only not supported for raw data connector queries"
    pure DataConnectorRunSQL {..}

-- NOTE: only the SQL text is serialized back out; "source" and the
-- rejected flags do not round-trip through ToJSON.
instance J.ToJSON DataConnectorRunSQL where
  toJSON DataConnectorRunSQL {..} =
    J.object
      [ "sql" J..= _dcSql
      ]

-- TODO:
--
-- This is defined in the same manner as runSQL variants for other existing backends.
--
-- The pattern used here should be improved since:
-- * It is brittle: Not as type-safe as it could be
-- * It is slow: Doesn't reuse schema-cache
-- * It is verbose: Code duplication i.e. templates
-- * It is incorrect: Uses runClientM directly without tracing capabilities
--
-- The intent is to refactor all usage of raw sql queries rather than try to fix everything
-- in this PR.
--
-- | Run a raw SQL statement on a data connector source. Fails with a 400
-- if the @<connector>_run_sql@ method prefix does not match the connector
-- type the source is actually configured with.
runSQL ::
  forall m.
  (MonadIO m, CacheRWM m, MonadError QErr m, MetadataM m) =>
  DataConnectorName ->
  DataConnectorRunSQL ->
  m EncJSON
runSQL methodConnectorName DataConnectorRunSQL {..} = do
  SourceConfig {..} <- askSourceConfig @'DataConnector _dcSource
  -- There is no way to know if the source prefix matches the backend type until we have `SourceConfig` available.
  unless (_scDataConnectorName == methodConnectorName) do
    throwError
      ( err400
          DataConnectorError
          ( "run_sql query referencing connector type " <> Witch.from methodConnectorName
              <> " not supported on source "
              <> sourceNameToText _dcSource
              <> " for data connector of type "
              <> Witch.from _scDataConnectorName
          )
      )
  -- Call the agent's POST /raw route directly via the generic servant client.
  let clientEnv = mkClientEnv _scManager _scEndpoint
  let client = (genericClient // API._raw) (toTxt _dcSource) _scConfig (RawRequest _dcSql)
  resultsE <- liftIO $ runClientM client clientEnv
  case tupleRows <$> resultsE of
    -- Transport/agent failure: surface a 400 with the client error attached
    -- as internal detail.
    Left e ->
      throwError
        (err400 DataConnectorError "Error performing raw query to data connector")
          { qeInternal = Just (ExtraInternal (J.String (tshow e)))
          }
    -- No rows: report like a command (DDL-style) result.
    Right [] -> pure $ encJFromJValue $ RunSQLRes "CommandOk" J.Null
    -- Rows: emit a header row (column names from the first row) followed by
    -- one value row per result, matching other backends' TuplesOk shape.
    Right results@(firstRow : _) ->
      let toRow = map snd
          toHeader = map $ J.String . fst
       in pure $ encJFromJValue $ RunSQLRes "TuplesOk" $ J.toJSON $ toHeader firstRow : map toRow results

-- | Flatten a 'API.RawResponse' into per-row (column, value) tuples, using
-- the key set of the FIRST row as the column order. Keys missing from a
-- later row are simply dropped from that row — no NULL padding is done.
tupleRows :: API.RawResponse -> [[(Text, J.Value)]]
tupleRows (API.RawResponse rs) = case rs of
  [] -> []
  xs@(x : _) ->
    let ks = H.keys x
        lookupKeys m = (\k -> maybe [] (pure . (k,)) $ H.lookup k m) =<< ks
     in map lookupKeys xs

View File

@ -39,7 +39,7 @@ import Data.Data (Typeable)
import Data.List.NonEmpty qualified as NonEmpty
import Data.Text qualified as Text
import Data.Text.Extended (ToTxt (..))
import Data.Text.NonEmpty (NonEmptyText)
import Data.Text.NonEmpty (NonEmptyText (unNonEmptyText))
import Hasura.Backends.DataConnector.API qualified as API
import Hasura.Base.ErrorValue qualified as ErrorValue
import Hasura.Base.ToErrorValue (ToErrorValue (..))
@ -120,7 +120,7 @@ instance ToJSON SourceTimeout where
data SourceConfig = SourceConfig
{ _scEndpoint :: BaseUrl,
_scConfig :: API.Config,
_scTemplate :: Maybe Text, -- TODO: Use Parsed Kriti Template
_scTemplate :: Maybe Text, -- TODO: Use Parsed Kriti Template, specify template language
_scCapabilities :: API.Capabilities,
_scSchema :: API.SchemaResponse,
_scManager :: HTTP.Manager,
@ -156,6 +156,9 @@ newtype DataConnectorName = DataConnectorName {unDataConnectorName :: NonEmptyTe
instance Witch.From DataConnectorName NonEmptyText
instance Witch.From DataConnectorName Text where
from = unNonEmptyText . Witch.from
data DataConnectorOptions = DataConnectorOptions
{_dcoUri :: BaseUrl}
deriving stock (Eq, Ord, Show, Generic)

View File

@ -1,4 +1,4 @@
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE ViewPatterns #-}
-- | The RQL query ('/v2/query')
module Hasura.Server.API.V2Query
@ -10,12 +10,14 @@ where
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Aeson.Types (Parser)
import Data.Environment qualified as Env
import Data.Text qualified as T
import Data.Text.NonEmpty (mkNonEmptyText)
import GHC.Generics.Extended (constrName)
import Hasura.Backends.BigQuery.DDL.RunSQL qualified as BigQuery
import Hasura.Backends.DataConnector.Adapter.RunSQL qualified as DataConnector
import Hasura.Backends.DataConnector.Adapter.Types (DataConnectorName (..))
import Hasura.Backends.MSSQL.DDL.RunSQL qualified as MSSQL
import Hasura.Backends.MySQL.SQL qualified as MySQL
import Hasura.Backends.Postgres.DDL.RunSQL qualified as Postgres
@ -30,6 +32,12 @@ import Hasura.RQL.DML.Delete
import Hasura.RQL.DML.Insert
import Hasura.RQL.DML.Select
import Hasura.RQL.DML.Types
( CountQuery,
DeleteQuery,
InsertQuery,
SelectQuery,
UpdateQuery,
)
import Hasura.RQL.DML.Update
import Hasura.RQL.Types.Metadata
import Hasura.RQL.Types.Run
@ -53,17 +61,34 @@ data RQLQuery
| RQCockroachRunSql !Postgres.RunSQL
| RQMysqlRunSql !MySQL.RunSQL
| RQBigqueryRunSql !BigQuery.BigQueryRunSQL
| RQDataConnectorRunSql !DataConnectorName !DataConnector.DataConnectorRunSQL
| RQBigqueryDatabaseInspection !BigQuery.BigQueryRunSQL
| RQBulk ![RQLQuery]
deriving (Generic)
$( deriveFromJSON
defaultOptions
{ constructorTagModifier = snakeCase . drop 2,
sumEncoding = TaggedObject "type" "args"
}
''RQLQuery
)
-- | This instance has been written by hand so that "wildcard" prefixes of _run_sql can be delegated to data connectors.
instance FromJSON RQLQuery where
  parseJSON = withObject "RQLQuery" \o -> do
    t <- o .: "type"
    let args :: forall a. FromJSON a => Parser a
        args = o .: "args"
        -- Recover a connector name from e.g. "sqlite_run_sql" -> "sqlite";
        -- yields Nothing when the suffix is absent or the prefix is empty.
        dcNameFromRunSql = T.stripSuffix "_run_sql" >=> mkNonEmptyText >=> pure . DataConnectorName
    case t of
      "insert" -> RQInsert <$> args
      "select" -> RQSelect <$> args
      "update" -> RQUpdate <$> args
      "delete" -> RQDelete <$> args
      "count" -> RQCount <$> args
      "run_sql" -> RQRunSql <$> args
      -- NOTE: the concrete backend tags below must stay ABOVE the view
      -- pattern, which would otherwise claim any tag ending in "_run_sql"
      -- (e.g. "mssql_run_sql") as a data connector query.
      "mssql_run_sql" -> RQMssqlRunSql <$> args
      "citus_run_sql" -> RQCitusRunSql <$> args
      "cockroach_run_sql" -> RQCockroachRunSql <$> args
      "mysql_run_sql" -> RQMysqlRunSql <$> args
      "bigquery_run_sql" -> RQBigqueryRunSql <$> args
      (dcNameFromRunSql -> Just t') -> RQDataConnectorRunSql t' <$> args
      "bigquery_database_inspection" -> RQBigqueryDatabaseInspection <$> args
      "bulk" -> RQBulk <$> args
      _ -> fail $ "Unrecognised RQLQuery type: " <> T.unpack t
runQuery ::
( MonadIO m,
@ -127,6 +152,7 @@ queryModifiesSchema = \case
RQMssqlRunSql q -> MSSQL.isSchemaCacheBuildRequiredRunSQL q
RQMysqlRunSql _ -> False
RQBigqueryRunSql _ -> False
RQDataConnectorRunSql _ _ -> False
RQBigqueryDatabaseInspection _ -> False
RQBulk l -> any queryModifiesSchema l
@ -156,6 +182,7 @@ runQueryM env rq = Tracing.trace (T.pack $ constrName rq) $ case rq of
RQCitusRunSql q -> Postgres.runRunSQL @'Citus q
RQCockroachRunSql q -> Postgres.runRunSQL @'Cockroach q
RQBigqueryRunSql q -> BigQuery.runSQL q
RQDataConnectorRunSql t q -> DataConnector.runSQL t q
RQBigqueryDatabaseInspection q -> BigQuery.runDatabaseInspection q
RQBulk l -> encJFromList <$> indexedMapM (runQueryM env) l
@ -172,5 +199,6 @@ queryModifiesUserDB = \case
RQMssqlRunSql _ -> True
RQMysqlRunSql _ -> True
RQBigqueryRunSql _ -> True
RQDataConnectorRunSql _ _ -> True
RQBigqueryDatabaseInspection _ -> False
RQBulk q -> any queryModifiesUserDB q

View File

@ -133,6 +133,9 @@ genMetricsCapabilities = pure MetricsCapabilities {}
genExplainCapabilities :: MonadGen m => m ExplainCapabilities
genExplainCapabilities = pure ExplainCapabilities {}
genRawCapabilities :: MonadGen m => m RawCapabilities
genRawCapabilities = pure RawCapabilities {}
genCapabilities :: Gen Capabilities
genCapabilities =
Capabilities
@ -145,6 +148,7 @@ genCapabilities =
<*> Gen.maybe genComparisonCapabilities
<*> Gen.maybe genMetricsCapabilities
<*> Gen.maybe genExplainCapabilities
<*> Gen.maybe genRawCapabilities
emptyConfigSchemaResponse :: ConfigSchemaResponse
emptyConfigSchemaResponse = ConfigSchemaResponse mempty mempty

View File

@ -45,7 +45,8 @@ capabilities =
{ API._ccSubqueryComparisonCapabilities = Just API.SubqueryComparisonCapabilities {API._ctccSupportsRelations = True}
},
API._cMetrics = Just API.MetricsCapabilities {},
API._cExplain = Just API.ExplainCapabilities {}
API._cExplain = Just API.ExplainCapabilities {},
API._cRaw = Just API.RawCapabilities {}
},
_crConfigSchemaResponse =
API.ConfigSchemaResponse
@ -573,6 +574,9 @@ healthcheckHandler _sourceName _config = pure NoContent
metricsHandler :: Handler Text
metricsHandler = pure "# NOTE: Metrics would go here."
rawHandler :: API.SourceName -> API.Config -> API.RawRequest -> Handler API.RawResponse
rawHandler _ _ _ = pure $ API.RawResponse [] -- NOTE: Raw query response would go here.
dcMockableServer :: I.IORef MockConfig -> I.IORef (Maybe API.QueryRequest) -> I.IORef (Maybe API.Config) -> Server API.Api
dcMockableServer mcfg mquery mQueryConfig =
mockCapabilitiesHandler mcfg
@ -581,6 +585,7 @@ dcMockableServer mcfg mquery mQueryConfig =
:<|> explainHandler
:<|> healthcheckHandler
:<|> metricsHandler
:<|> rawHandler
mockAgentPort :: Warp.Port
mockAgentPort = 65006

View File

@ -57,6 +57,7 @@ defaultBackendCapabilities = \case
supports_relations: true
explain: {}
metrics: {}
raw: {}
queries:
supports_primary_keys: true
|]