mirror of https://github.com/hasura/graphql-engine.git (synced 2024-12-14 08:02:15 +03:00)

commit 281cb771ff (parent bd495b4aae)

server: add MSSQL support

Co-authored-by: Rakesh Emmadi <12475069+rakeshkky@users.noreply.github.com>
Co-authored-by: Antoine Leblanc <1618949+nicuveo@users.noreply.github.com>
Co-authored-by: Vamshi Surabhi <6562944+0x777@users.noreply.github.com>
Co-authored-by: Aravind K P <8335904+scriptonist@users.noreply.github.com>

GitOrigin-RevId: 699c453b9692e1b822f393f23ff5e6db4e010d57
@@ -10,6 +10,10 @@ RUN go get github.com/mitchellh/gox \
 # install UPX and netcat
 RUN apt-get update && apt-get install -y \
     xz-utils netcat libpq5 postgresql-client \
+    && curl -s https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list \
+    && curl -s https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
+    && apt-get update \
+    && ACCEPT_EULA=Y apt-get install -y ca-certificates libkrb5-3 libpq5 libnuma1 unixodbc-dev msodbcsql17 \
     && curl -Lo /tmp/upx-${upx_version}.tar.xz https://github.com/upx/upx/releases/download/v${upx_version}/upx-${upx_version}-amd64_linux.tar.xz \
     && xz -d -c /tmp/upx-${upx_version}.tar.xz \
     | tar -xOf - upx-${upx_version}-amd64_linux/upx > /bin/upx \
@@ -1,7 +1,7 @@
 # anchor refs to be used elsewhere
 refs:
   constants:
-  - &server_builder_image hasura/graphql-engine-server-builder:2020-11-05
+  - &server_builder_image hasura/graphql-engine-server-builder:0a016555f4fcea810ea4efcd024c11cb66cb4921928e5e59f5c8e44338a05287
   skip_job_on_ciignore: &skip_job_on_ciignore
     run:
       name: checking if job should be terminated or not
@@ -113,7 +113,12 @@ refs:
       command: |
         mkdir -p /usr/share/man/man{1,7}
         apt-get update
-        apt install --yes pgbouncer jq curl postgresql-client-13
+        apt install --yes curl apt-transport-https
+        curl -s https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list
+        curl -s https://packages.microsoft.com/keys/microsoft.asc | apt-key add -
+        apt-get update
+        apt install --yes pgbouncer jq postgresql-client-13 g++ gcc libc6-dev unixodbc-dev
+        ACCEPT_EULA=Y apt install --yes msodbcsql17
   - run:
       name: Ensure databases are present
       environment:
@@ -405,7 +410,7 @@ jobs:
   # test and build cli
   test_and_build_cli:
    docker:
-    - image: hasura/graphql-engine-cli-builder:20201105
+    - image: hasura/graphql-engine-cli-builder:20210223
    - image: circleci/postgres:10-alpine
      environment:
        POSTGRES_USER: gql_test
@@ -1,6 +1,11 @@
 # Hasura GraphQL Engine Changelog

 ## Next release

+### MSSQL support
+
+It's now possible to add an MSSQL server as a source. For now, only read-only queries and subscriptions are supported.
+
+FIXME FIXME expand on this.
+
 ### Inconsistent Metadata

 Add `allow_inconsistent_metadata` option to `replace_metadata` API.
@@ -1,4 +1,4 @@
-#!/bin/bash
+#!/usr/bin/env bash
 set -euo pipefail
 shopt -s globstar

@@ -39,6 +39,10 @@ Available COMMANDs:
     Launch a postgres container suitable for use with graphql-engine, watch its logs,
     clean up nicely after

+  mssql
+    Launch an MSSQL container suitable for use with graphql-engine, watch its logs,
+    clean up nicely after
+
   test [--integration [pytest_args...] | --unit | --hlint]
     Run the unit and integration tests, handling spinning up all dependencies.
     This will force a recompile. A combined code coverage report will be
@@ -81,6 +85,8 @@ case "${1-}" in
   ;;
   postgres)
   ;;
+  mssql)
+  ;;
   test)
     case "${2-}" in
       --unit)
@@ -144,31 +150,49 @@ fi
 if [ "$MODE" = "test" ]; then
   # Choose a different port so PG is totally disposable:
   PG_PORT=35432
+  MSSQL_PORT=31433
 else
   PG_PORT=25432
+  MSSQL_PORT=21433
 fi

 # export for psql, etc.
 export PGPASSWORD=postgres
+# needs at least 8 characters, and lowercase, uppercase and number
+export MSSQLPASSWORD="hasuraMSSQL1"

 # The URL for the postgres server we might launch
-CONTAINER_DB_URL="postgres://postgres:$PGPASSWORD@127.0.0.1:$PG_PORT/postgres"
+POSTGRES_DB_URL="postgres://postgres:$PGPASSWORD@127.0.0.1:$PG_PORT/postgres"
 # ... but we might like to use a different PG instance when just launching graphql-engine:
-HASURA_GRAPHQL_DATABASE_URL=${HASURA_GRAPHQL_DATABASE_URL-$CONTAINER_DB_URL}
+HASURA_GRAPHQL_DATABASE_URL=${HASURA_GRAPHQL_DATABASE_URL-$POSTGRES_DB_URL}

 PG_CONTAINER_NAME="hasura-dev-postgres-$PG_PORT"
+MSSQL_CONTAINER_NAME="hasura-dev-mssql-$MSSQL_PORT"

 # We can remove psql as a dependency by using it from the (running) PG container:
 DOCKER_PSQL="docker exec -u postgres -it $PG_CONTAINER_NAME psql $HASURA_GRAPHQL_DATABASE_URL"

 function wait_postgres {
   echo -n "Waiting for postgres to come up"
-  until ( $DOCKER_PSQL -c '\l' || psql $HASURA_GRAPHQL_DATABASE_URL -c '\l') &>/dev/null; do
+  until ( $DOCKER_PSQL -c '\l' ) &>/dev/null; do
     echo -n '.' && sleep 0.2
   done
   echo " Ok"
 }

+function wait_mssql {
+  set +e
+  echo -n "Waiting for mssql to come up"
+  docker exec -t $MSSQL_CONTAINER_NAME /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P $MSSQLPASSWORD -Q "SELECT 1" &>/dev/null
+  # Keep polling until sqlcmd succeeds, i.e. until the server accepts connections:
+  while [ $? -ne 0 ];
+  do
+    echo -n '.' && sleep 0.2
+    docker exec -t $MSSQL_CONTAINER_NAME /opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P $MSSQLPASSWORD -Q "SELECT 1" &>/dev/null
+  done
+  set -e
+  echo " Ok"
+}
+
 #################################
 ### Graphql-engine            ###
 #################################
@@ -284,7 +308,7 @@ fi
 # https://forums.aws.amazon.com/thread.jspa?threadID=291285
 #
 # All lines up to log_error_verbosity are to support pgBadger:
 # https://github.com/darold/pgbadger#LOG-STATEMENTS
 #
 # Also useful:
 # log_autovacuum_min_duration=0
@@ -371,12 +395,58 @@ if [ "$MODE" = "postgres" ]; then
   echo_pretty " $ PGPASSWORD="$PGPASSWORD" psql -h 127.0.0.1 -p "$PG_PORT" postgres -U postgres"
   echo_pretty ""
   echo_pretty "Here is the database URL:"
-  echo_pretty " $CONTAINER_DB_URL"
+  echo_pretty " $POSTGRES_DB_URL"
   echo_pretty ""
   echo_pretty "If you want to launch a 'graphql-engine' that works with this database:"
   echo_pretty " $ $0 graphql-engine"
   # Runs continuously until CTRL-C, jumping to cleanup() above:
   docker logs -f --tail=0 "$PG_CONTAINER_NAME"
+fi
+
+#################################
+###     MSSQL Container       ###
+#################################
+
+function launch_mssql_container(){
+  echo_pretty "Launching MSSQL container: $MSSQL_CONTAINER_NAME"
+  docker run --name "$MSSQL_CONTAINER_NAME" --net=host \
+    -e SA_PASSWORD="$MSSQLPASSWORD" -e "ACCEPT_EULA=Y" -d mcr.microsoft.com/mssql/server:2019-CU8-ubuntu-16.04
+
+  # Since launching the MSSQL container worked we can set up cleanup routines. This will catch CTRL-C
+  function cleanup {
+    echo
+
+    if [ ! -z "${GRAPHQL_ENGINE_PID-}" ]; then
+      # Kill the cabal new-run and its children. This may already have been killed:
+      pkill -P "$GRAPHQL_ENGINE_PID" &>/dev/null || true
+    fi
+
+    case "$MODE" in
+      test|mssql)
+        echo_pretty "Removing $MSSQL_CONTAINER_NAME and its volumes in 5 seconds!"
+        echo_pretty "  PRESS CTRL-C TO ABORT removal, or ENTER to clean up right away"
+        read -t5 || true
+        docker stop "$MSSQL_CONTAINER_NAME"
+        docker rm -v "$MSSQL_CONTAINER_NAME"
+        ;;
+      graphql-engine)
+        ;;
+    esac
+
+    echo_pretty "Done"
+  }
+  trap cleanup EXIT
+}
+
+
+if [ "$MODE" = "mssql" ]; then
+  launch_mssql_container
+  wait_mssql
+  echo_pretty "MSSQL logs will start to show up in realtime here. Press CTRL-C to exit and "
+  echo_pretty "shutdown this container."
+  # Runs continuously until CTRL-C, jumping to cleanup() above:
+  docker logs -f --tail=0 "$MSSQL_CONTAINER_NAME"
+
+
 elif [ "$MODE" = "test" ]; then
   ########################################
@@ -406,14 +476,14 @@ elif [ "$MODE" = "test" ]; then
   # These also depend on a running DB:
   if [ "$RUN_UNIT_TESTS" = true ]; then
     echo_pretty "Running Haskell test suite"
-    HASURA_GRAPHQL_DATABASE_URL="$CONTAINER_DB_URL" cabal new-run --project-file=cabal.project.dev-sh -- test:graphql-engine-tests
+    HASURA_GRAPHQL_DATABASE_URL="$POSTGRES_DB_URL" cabal new-run --project-file=cabal.project.dev-sh -- test:graphql-engine-tests
   fi

   if [ "$RUN_INTEGRATION_TESTS" = true ]; then
     GRAPHQL_ENGINE_TEST_LOG=/tmp/hasura-dev-test-engine.log
     echo_pretty "Starting graphql-engine, logging to $GRAPHQL_ENGINE_TEST_LOG"
     export HASURA_GRAPHQL_SERVER_PORT=8088
-    cabal new-run --project-file=cabal.project.dev-sh -- exe:graphql-engine --database-url="$CONTAINER_DB_URL" serve --stringify-numeric-types \
+    cabal new-run --project-file=cabal.project.dev-sh -- exe:graphql-engine --database-url="$POSTGRES_DB_URL" serve --stringify-numeric-types \
       --enable-console --console-assets-dir ../console/static/dist \
       &> "$GRAPHQL_ENGINE_TEST_LOG" & GRAPHQL_ENGINE_PID=$!

@@ -475,7 +545,7 @@ elif [ "$MODE" = "test" ]; then


   # TODO MAYBE: fix deprecation warnings, make them an error
-  if pytest -W ignore::DeprecationWarning --hge-urls http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT --pg-urls "$CONTAINER_DB_URL" $PYTEST_ARGS; then
+  if pytest -W ignore::DeprecationWarning --hge-urls http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT --pg-urls "$POSTGRES_DB_URL" $PYTEST_ARGS; then
     PASSED=true
   else
     PASSED=false
@@ -70,7 +70,7 @@
 - ignore: {name: Use sequenceA}
 - ignore: {name: Use camelCase}
 - ignore: {name: Redundant return}
-- ignore: {name: Use <$>, within: Hasura.RQL.DDL.Metadata}
+- ignore: {name: Use <$>, within: [Hasura.RQL.DDL.Metadata, Hasura.Backends.MSSQL.Types.Instances]}
 - ignore: {name: Functor law, within: Hasura.Server.AuthSpec}

 # Define some custom infix operators
@@ -4,7 +4,9 @@ VERSION ?= $(shell ../scripts/get-version.sh)
 export VERSION

 registry := hasura
-packager_ver := 20190731
+# This packager version is built using the packager.df in the packaging folder:
+# docker build -t "hasura/graphql-engine-packager:20210218" -f packager.df .
+packager_ver := 20210218
 pg_dump_ver := 13
 build_output := /build/_server_output

|
|||||||
# assumes this is built in circleci
|
# assumes this is built in circleci
|
||||||
ci-image:
|
ci-image:
|
||||||
mkdir -p packaging/build/rootfs
|
mkdir -p packaging/build/rootfs
|
||||||
docker create -v /root/ --name dummy alpine:3.4 /bin/true
|
cp '$(build_output)/graphql-engine' packaging/build/rootfs
|
||||||
docker cp '$(build_output)/graphql-engine' dummy:/root/
|
strip --strip-unneeded packaging/build/rootfs/graphql-engine
|
||||||
docker run --rm --volumes-from dummy '$(registry)/graphql-engine-packager:$(packager_ver)' /build.sh graphql-engine | tar -x -C packaging/build/rootfs
|
cp '/usr/lib/postgresql/$(pg_dump_ver)/bin/pg_dump' packaging/build/rootfs/pg_dump
|
||||||
strip --strip-unneeded packaging/build/rootfs/bin/graphql-engine
|
upx packaging/build/rootfs/graphql-engine
|
||||||
cp '/usr/lib/postgresql/$(pg_dump_ver)/bin/pg_dump' packaging/build/rootfs/bin/pg_dump
|
|
||||||
upx packaging/build/rootfs/bin/graphql-engine
|
|
||||||
docker build -t '$(registry)/graphql-engine:$(VERSION)' packaging/build/
|
docker build -t '$(registry)/graphql-engine:$(VERSION)' packaging/build/
|
||||||
|
|
||||||
ci-save-image:
|
ci-save-image:
|
||||||
|
@@ -59,3 +59,16 @@ source-repository-package
   type: git
   location: https://github.com/hasura/pool.git
   tag: bc4c3f739a8fb8ec4444336a34662895831c9acf
+
+source-repository-package
+  type: git
+  location: https://github.com/fpco/odbc.git
+  tag: 95cefd30a0daf4a9cc99e745beeea4034232e8ca
+
+package odbc
+  ghc-options: -Wwarn
+  -- Our CI compiles with -Werror, which is also applied to those packages
+  -- while it's fine for packages we maintain, we can't actually enforce
+  -- that third-party packages are warning-free, hence this -Wno-error.
+  -- When the changes in odbc are released, we can instead depend on
+  -- the hackage version, and remove it from this list of packages.
@@ -76,6 +76,7 @@ constraints: any.Cabal ==3.2.0.0,
             any.concise ==0.1.0.1,
             any.concurrent-output ==1.10.12,
             any.conduit ==1.3.4,
+            any.conduit-extra ==1.3.5,
             any.connection ==0.3.1,
             any.constraints ==0.12,
             any.constraints-extras ==0.3.0.2,
@@ -121,6 +122,8 @@ constraints: any.Cabal ==3.2.0.0,
             distributive +semigroups +tagged,
             any.dlist ==1.0,
             dlist -werror,
+            any.double-conversion ==2.0.2.0,
+            double-conversion -developer,
             any.easy-file ==0.2.2,
             any.either ==5.0.1.1,
             any.ekg-core ==0.1.1.7,
@@ -135,13 +138,17 @@ constraints: any.Cabal ==3.2.0.0,
             any.filepath ==1.4.2.1,
             any.focus ==1.0.2,
             any.foldl ==1.4.10,
+            any.formatting ==7.1.1,
             any.free ==5.1.6,
             any.generic-arbitrary ==0.1.0,
+            any.ghc ==8.10.2,
+            any.ghc-boot ==8.10.2,
             any.ghc-boot-th ==8.10.2,
             any.ghc-heap ==8.10.2,
             any.ghc-heap-view ==0.6.2,
             ghc-heap-view -prim-supports-any,
             any.ghc-prim ==0.6.1,
+            any.ghci ==8.10.2,
             any.happy ==1.20.0,
             any.hashable ==1.3.0.0,
             hashable -examples +integer-gmp +sse2 -sse41,
@@ -153,6 +160,7 @@ constraints: any.Cabal ==3.2.0.0,
             any.hasql-transaction ==1.0.0.1,
             any.hedgehog ==1.0.4,
             any.hourglass ==0.2.12,
+            any.hpc ==0.6.1.0,
             any.hsc2hs ==0.68.7,
             hsc2hs -in-ghc-tree,
             any.hspec ==2.7.8,
@@ -165,6 +173,7 @@ constraints: any.Cabal ==3.2.0.0,
             any.http-client ==0.7.5,
             http-client +network-uri,
             any.http-client-tls ==0.3.5.3,
+            any.http-conduit ==2.3.7.3,
             any.http-date ==0.0.10,
             any.http-types ==0.12.3,
             any.http2 ==2.0.5,
@@ -298,7 +307,9 @@ constraints: any.Cabal ==3.2.0.0,
             tagged +deepseq +transformers,
             any.template-haskell ==2.16.0.0,
             any.template-haskell-compat-v0208 ==0.1.5,
+            any.temporary ==1.3,
             any.terminal-size ==0.3.2.1,
+            any.terminfo ==0.4.1.4,
             any.text ==1.2.3.2,
             any.text-builder ==0.6.6.1,
             any.text-conversions ==0.3.1,
@@ -328,6 +339,7 @@ constraints: any.Cabal ==3.2.0.0,
             transformers-compat -five +five-three -four +generic-deriving +mtl -three -two,
             any.type-equality ==1,
             any.type-hint ==0.1,
+            any.typed-process ==0.2.6.0,
             any.unix ==2.7.2.2,
             any.unix-compat ==0.5.3,
             unix-compat -old-time,
@@ -364,6 +376,7 @@ constraints: any.Cabal ==3.2.0.0,
             warp +allow-sendfilefd -network-bytestring -warp-debug,
             any.websockets ==0.12.7.2,
             websockets -example,
+            any.weigh ==0.0.16,
             any.witherable ==0.4.1,
             any.wl-pprint-annotated ==0.1.0.1,
             any.word8 ==0.1.3,
@@ -108,6 +108,7 @@ library
                      , validation
                      , lifted-base
                      , pg-client
+                     , http-conduit
                      , validation
                      , text
                      , text-builder >= 0.6
@@ -265,6 +266,10 @@ library
                      , cron >= 0.6.2
                      -- needed for deriving via
                      , semigroups >= 0.19
+
+                     -- mssql support
+                     , odbc
+
   if !flag(profiling)
     build-depends:
       -- 0.6.1 is supposedly not okay for ghc 8.6:
|
|||||||
, Hasura.Metadata.Class
|
, Hasura.Metadata.Class
|
||||||
|
|
||||||
, Hasura.Backends.Postgres.Connection
|
, Hasura.Backends.Postgres.Connection
|
||||||
|
, Hasura.Backends.Postgres.DDL
|
||||||
|
, Hasura.Backends.Postgres.DDL.BoolExp
|
||||||
|
, Hasura.Backends.Postgres.DDL.Field
|
||||||
|
, Hasura.Backends.Postgres.DDL.Function
|
||||||
|
, Hasura.Backends.Postgres.DDL.RunSQL
|
||||||
|
, Hasura.Backends.Postgres.DDL.Source
|
||||||
|
, Hasura.Backends.Postgres.DDL.Table
|
||||||
, Hasura.Backends.Postgres.Execute.LiveQuery
|
, Hasura.Backends.Postgres.Execute.LiveQuery
|
||||||
, Hasura.Backends.Postgres.Execute.Mutation
|
, Hasura.Backends.Postgres.Execute.Mutation
|
||||||
, Hasura.Backends.Postgres.Execute.RemoteJoin
|
, Hasura.Backends.Postgres.Execute.RemoteJoin
|
||||||
, Hasura.Backends.Postgres.Execute.Types
|
, Hasura.Backends.Postgres.Execute.Types
|
||||||
|
, Hasura.Backends.Postgres.Instances.Execute
|
||||||
|
, Hasura.Backends.Postgres.Instances.Schema
|
||||||
|
, Hasura.Backends.Postgres.Instances.Transport
|
||||||
|
, Hasura.Backends.Postgres.Instances.Types
|
||||||
|
, Hasura.Backends.Postgres.SQL.DML
|
||||||
|
, Hasura.Backends.Postgres.SQL.Error
|
||||||
|
, Hasura.Backends.Postgres.SQL.Rewrite
|
||||||
|
, Hasura.Backends.Postgres.SQL.Types
|
||||||
|
, Hasura.Backends.Postgres.SQL.Value
|
||||||
, Hasura.Backends.Postgres.Translate.BoolExp
|
, Hasura.Backends.Postgres.Translate.BoolExp
|
||||||
, Hasura.Backends.Postgres.Translate.Column
|
, Hasura.Backends.Postgres.Translate.Column
|
||||||
, Hasura.Backends.Postgres.Translate.Delete
|
, Hasura.Backends.Postgres.Translate.Delete
|
||||||
@ -310,18 +331,24 @@ library
|
|||||||
, Hasura.Backends.Postgres.Translate.Select
|
, Hasura.Backends.Postgres.Translate.Select
|
||||||
, Hasura.Backends.Postgres.Translate.Types
|
, Hasura.Backends.Postgres.Translate.Types
|
||||||
, Hasura.Backends.Postgres.Translate.Update
|
, Hasura.Backends.Postgres.Translate.Update
|
||||||
, Hasura.Backends.Postgres.SQL.DML
|
|
||||||
, Hasura.Backends.Postgres.SQL.Error
|
, Hasura.Backends.MSSQL.Connection
|
||||||
, Hasura.Backends.Postgres.SQL.Rewrite
|
, Hasura.Backends.MSSQL.DDL
|
||||||
, Hasura.Backends.Postgres.SQL.Types
|
, Hasura.Backends.MSSQL.DDL.RunSQL
|
||||||
, Hasura.Backends.Postgres.SQL.Value
|
, Hasura.Backends.MSSQL.DDL.Source
|
||||||
, Hasura.Backends.Postgres.DDL
|
, Hasura.Backends.MSSQL.DDL.BoolExp
|
||||||
, Hasura.Backends.Postgres.DDL.Table
|
, Hasura.Backends.MSSQL.FromIr
|
||||||
, Hasura.Backends.Postgres.DDL.Source
|
, Hasura.Backends.MSSQL.Instances.Execute
|
||||||
, Hasura.Backends.Postgres.DDL.Field
|
, Hasura.Backends.MSSQL.Instances.Schema
|
||||||
, Hasura.Backends.Postgres.DDL.Function
|
, Hasura.Backends.MSSQL.Instances.Transport
|
||||||
, Hasura.Backends.Postgres.DDL.BoolExp
|
, Hasura.Backends.MSSQL.Instances.Types
|
||||||
, Hasura.Backends.Postgres.DDL.RunSQL
|
, Hasura.Backends.MSSQL.Meta
|
||||||
|
, Hasura.Backends.MSSQL.Plan
|
||||||
|
, Hasura.Backends.MSSQL.Result
|
||||||
|
, Hasura.Backends.MSSQL.ToQuery
|
||||||
|
, Hasura.Backends.MSSQL.Types
|
||||||
|
, Hasura.Backends.MSSQL.Types.Instances
|
||||||
|
, Hasura.Backends.MSSQL.Types.Internal
|
||||||
|
|
||||||
-- Exposed for benchmark:
|
-- Exposed for benchmark:
|
||||||
, Hasura.Cache.Bounded
|
, Hasura.Cache.Bounded
|
||||||
@@ -465,7 +492,6 @@ library
                      , Hasura.GraphQL.Execute.LiveQuery.TMap
                      , Hasura.GraphQL.Execute.Mutation
                      , Hasura.GraphQL.Execute.Plan
-                     , Hasura.GraphQL.Execute.Postgres
                      , Hasura.GraphQL.Execute.Prepare
                      , Hasura.GraphQL.Execute.Remote
                      , Hasura.GraphQL.Execute.RemoteJoin
@@ -498,7 +524,6 @@ library
                      , Hasura.GraphQL.Transport.Backend
                      , Hasura.GraphQL.Transport.HTTP
                      , Hasura.GraphQL.Transport.HTTP.Protocol
-                     , Hasura.GraphQL.Transport.Postgres
                      , Hasura.GraphQL.Transport.WebSocket
                      , Hasura.GraphQL.Transport.WebSocket.Protocol
                      , Hasura.GraphQL.Transport.WebSocket.Server
@@ -538,7 +563,7 @@ test-suite graphql-engine-tests
   import: common-all, common-exe
   type: exitcode-stdio-1.0
   build-depends:
-    , aeson
+      aeson
     , base
     , bytestring
     , containers
@@ -1,4 +1,22 @@
-FROM scratch
-COPY rootfs/ /
+FROM debian:stretch-20190228-slim
+
 ENV LANG=C.UTF-8 LC_ALL=C.UTF-8
+
+RUN apt-get update \
+    && apt-get install -y gnupg2 curl apt-transport-https \
+    && curl -s https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list \
+    && curl -s https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
+    && apt-get update \
+    && ACCEPT_EULA=Y apt-get install -y ca-certificates libkrb5-3 libpq5 libnuma1 unixodbc-dev msodbcsql17 \
+    && apt-get -y remove curl gnupg2 \
+    && apt-get -y auto-remove \
+    && apt-get -y clean \
+    && rm -rf /var/lib/apt/lists/* \
+    && rm -rf /usr/share/doc/ \
+    && rm -rf /usr/share/man/ \
+    && rm -rf /usr/share/locale/
+
+COPY rootfs/graphql-engine /bin/
+COPY rootfs/pg_dump /bin/
+COPY rootfs/srv/* /srv
+
 CMD ["graphql-engine", "serve"]
@@ -1,7 +1,12 @@
 FROM hasura/haskell-docker-packager:20190731
 MAINTAINER vamshi@hasura.io

-RUN apt-get update && apt-get install -y libpq5 upx \
+RUN apt-get update && apt-get install -y libpq5 curl apt-transport-https upx \
+    && curl -s https://packages.microsoft.com/config/debian/9/prod.list > /etc/apt/sources.list.d/mssql-release.list \
+    && curl -s https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
+    && apt-get update \
+    && apt install -y unixodbc-dev freetds-dev \
+    && ACCEPT_EULA=Y apt install --yes msodbcsql17 \
     && update-ca-certificates \
     && mkdir -p /usr/src/busybox/rootfs/etc/ssl/certs \
     && cp -L /etc/ssl/certs/* /usr/src/busybox/rootfs/etc/ssl/certs/ \
@@ -15,6 +15,7 @@ module Data.Text.Extended

 import Hasura.Prelude

+import qualified Database.ODBC.SQLServer as ODBC
 import qualified Language.GraphQL.Draft.Printer as G
 import qualified Language.GraphQL.Draft.Syntax as G
 import qualified Text.Builder as TB
@@ -40,6 +41,10 @@ instance ToTxt Void where
 instance ToTxt (G.Value Void) where
   toTxt = TB.run . G.value

+instance ToTxt ODBC.Query where
+  toTxt = ODBC.renderQuery
+
+
 bquote :: ToTxt t => t -> Text
 bquote t = DT.singleton '`' <> toTxt t <> DT.singleton '`'
 {-# INLINE bquote #-}
|
@ -31,9 +31,7 @@ mkNonEmptyTextUnsafe :: Text -> NonEmptyText
|
|||||||
mkNonEmptyTextUnsafe = NonEmptyText
|
mkNonEmptyTextUnsafe = NonEmptyText
|
||||||
|
|
||||||
parseNonEmptyText :: MonadFail m => Text -> m NonEmptyText
|
parseNonEmptyText :: MonadFail m => Text -> m NonEmptyText
|
||||||
parseNonEmptyText text = case mkNonEmptyText text of
|
parseNonEmptyText text = mkNonEmptyText text `onNothing` fail "empty string not allowed"
|
||||||
Nothing -> fail "empty string not allowed"
|
|
||||||
Just neText -> return neText
|
|
||||||
|
|
||||||
nonEmptyText :: Text -> Q (TExp NonEmptyText)
|
nonEmptyText :: Text -> Q (TExp NonEmptyText)
|
||||||
nonEmptyText = parseNonEmptyText >=> \text -> [|| text ||]
|
nonEmptyText = parseNonEmptyText >=> \text -> [|| text ||]
|
||||||
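For reference, `onNothing` comes from Hasura.Prelude (not shown in this diff); assuming its usual definition, it is simply `maybe` with the arguments arranged so the fallback action reads last, which is what makes the one-line rewrite equivalent to the old case expression:

-- Assumed definition, for illustration only:
onNothing :: Applicative m => Maybe a -> m a -> m a
onNothing m act = maybe act pure m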
|
@ -501,8 +501,10 @@ runHGEServer setupHook env ServeOptions{..} ServeCtx{..} initTime postPollHook s
|
|||||||
maxEvThrds = fromMaybe defaultMaxEventThreads soEventsHttpPoolSize
|
maxEvThrds = fromMaybe defaultMaxEventThreads soEventsHttpPoolSize
|
||||||
fetchI = milliseconds $ fromMaybe (Milliseconds defaultFetchInterval) soEventsFetchInterval
|
fetchI = milliseconds $ fromMaybe (Milliseconds defaultFetchInterval) soEventsFetchInterval
|
||||||
logEnvHeaders = soLogHeadersFromEnv
|
logEnvHeaders = soLogHeadersFromEnv
|
||||||
allPgSources = mapMaybe (unsafeSourceConfiguration @'Postgres) $ HM.elems $ scPostgres $ lastBuiltSchemaCache _scSchemaCache
|
allPgSources = mapMaybe (unsafeSourceConfiguration @'Postgres) $ HM.elems $ scSources $ lastBuiltSchemaCache _scSchemaCache
|
||||||
|
|
||||||
|
-- TODO: is this correct?
|
||||||
|
-- event triggers should be tied to the life cycle of a source
|
||||||
lockedEventsCtx <- allocate
|
lockedEventsCtx <- allocate
|
||||||
(liftIO $ atomically initLockedEventsCtx)
|
(liftIO $ atomically initLockedEventsCtx)
|
||||||
(\lockedEventsCtx ->
|
(\lockedEventsCtx ->
|
||||||
|
server/src-lib/Hasura/Backends/MSSQL/Connection.hs (new file, 36 lines)
@@ -0,0 +1,36 @@
+module Hasura.Backends.MSSQL.Connection where
+
+import Hasura.Prelude
+
+import Data.Aeson
+import Data.Aeson.TH
+
+import Hasura.Incremental (Cacheable (..))
+
+-- | ODBC connection string for MSSQL server
+newtype MSSQLConnectionString
+  = MSSQLConnectionString {unMSSQLConnectionString :: Text}
+  deriving (Show, Eq, ToJSON, FromJSON, Cacheable, Hashable, NFData, Arbitrary)
+
+data MSSQLConnectionInfo
+  = MSSQLConnectionInfo
+  { _mciConnectionString :: !MSSQLConnectionString
+  } deriving (Show, Eq, Generic)
+instance Cacheable MSSQLConnectionInfo
+instance Hashable MSSQLConnectionInfo
+instance NFData MSSQLConnectionInfo
+instance Arbitrary MSSQLConnectionInfo where
+  arbitrary = genericArbitrary
+$(deriveJSON hasuraJSON ''MSSQLConnectionInfo)
+
+data MSSQLConnConfiguration
+  = MSSQLConnConfiguration
+  { _mccConnectionInfo :: !MSSQLConnectionInfo
+  } deriving (Show, Eq, Generic)
+instance Cacheable MSSQLConnConfiguration
+instance Hashable MSSQLConnConfiguration
+instance NFData MSSQLConnConfiguration
+$(deriveJSON hasuraJSON ''MSSQLConnConfiguration)
+
+instance Arbitrary MSSQLConnConfiguration where
+  arbitrary = genericArbitrary
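An aside on the wire format these types produce: `hasuraJSON` is assumed here to strip the record-field prefix and snake_case what remains (its definition lives in Hasura.Prelude, not in this diff), so a configuration should serialize as {"connection_info": {"connection_string": ...}}. A minimal standalone sketch of that encoding, approximating `hasuraJSON` with `aesonPrefix snakeCase` from the aeson-casing package:

{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TemplateHaskell   #-}

import Data.Aeson (encode)
import Data.Aeson.Casing (aesonPrefix, snakeCase)
import Data.Aeson.TH (deriveJSON)
import Data.Text (Text)

-- Simplified stand-ins for MSSQLConnectionInfo / MSSQLConnConfiguration:
data ConnectionInfo = ConnectionInfo { _ciConnectionString :: Text }
data ConnConfiguration = ConnConfiguration { _ccConnectionInfo :: ConnectionInfo }

$(deriveJSON (aesonPrefix snakeCase) ''ConnectionInfo)
$(deriveJSON (aesonPrefix snakeCase) ''ConnConfiguration)

main :: IO ()
main =
  -- prints {"connection_info":{"connection_string":"DRIVER=..."}}
  print $ encode $ ConnConfiguration $ ConnectionInfo "DRIVER={ODBC Driver 17 for SQL Server};SERVER=localhost,1433;Uid=sa;Pwd=..."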
server/src-lib/Hasura/Backends/MSSQL/DDL.hs (new file, 141 lines)
@@ -0,0 +1,141 @@
+module Hasura.Backends.MSSQL.DDL
+  ( buildComputedFieldInfo
+  , buildRemoteFieldInfo
+  , fetchAndValidateEnumValues
+  , createTableEventTrigger
+  , buildEventTriggerInfo
+  , buildFunctionInfo
+  , updateColumnInEventTrigger
+  , parseCollectableType
+  , module M
+  )
+where
+
+import Hasura.Prelude
+
+import Data.Aeson
+
+import qualified Data.Environment as Env
+
+import Hasura.RQL.IR.BoolExp
+import Hasura.RQL.Types.Backend
+import Hasura.RQL.Types.Column
+import Hasura.RQL.Types.Common
+import Hasura.RQL.Types.ComputedField
+import Hasura.RQL.Types.Error
+import Hasura.RQL.Types.EventTrigger
+import Hasura.RQL.Types.Function
+import Hasura.RQL.Types.RemoteRelationship
+import Hasura.RQL.Types.SchemaCache
+import Hasura.RQL.Types.Table
+import Hasura.SQL.Backend
+import Hasura.SQL.Types
+import Hasura.Server.Types
+import Hasura.Server.Utils
+import Hasura.Session
+
+import qualified Hasura.Backends.MSSQL.Types as MT
+
+import Hasura.Backends.MSSQL.DDL.BoolExp as M
+import Hasura.Backends.MSSQL.DDL.Source as M
+
+
+buildComputedFieldInfo
+  :: (MonadError QErr m)
+  => HashSet (TableName 'MSSQL)
+  -> TableName 'MSSQL
+  -> ComputedFieldName
+  -> ComputedFieldDefinition 'MSSQL
+  -> RawFunctionInfo
+  -> Maybe Text
+  -> m (ComputedFieldInfo 'MSSQL)
+buildComputedFieldInfo _ _ _ _ _ _ =
+  throw400 NotSupported "Computed fields aren't supported for MSSQL sources"
+
+buildRemoteFieldInfo
+  :: (MonadError QErr m)
+  => RemoteRelationship 'MSSQL
+  -> [ColumnInfo 'MSSQL]
+  -> RemoteSchemaMap
+  -> m (RemoteFieldInfo 'MSSQL, [SchemaDependency])
+buildRemoteFieldInfo _ _ _ =
+  throw400 NotSupported "Remote joins aren't supported for MSSQL sources"
+
+fetchAndValidateEnumValues
+  :: (Monad m)
+  => SourceConfig 'MSSQL
+  -> TableName 'MSSQL
+  -> Maybe (PrimaryKey 'MSSQL (RawColumnInfo 'MSSQL))
+  -> [RawColumnInfo 'MSSQL]
+  -> m (Either QErr EnumValues)
+fetchAndValidateEnumValues _ _ _ _ = runExceptT $
+  throw400 NotSupported "Enum tables are not supported for MSSQL sources"
+
+createTableEventTrigger
+  :: (Monad m)
+  => ServerConfigCtx
+  -> SourceConfig 'MSSQL
+  -> TableName 'MSSQL
+  -> [ColumnInfo 'MSSQL]
+  -> TriggerName
+  -> TriggerOpsDef
+  -> m (Either QErr ())
+createTableEventTrigger _ _ _ _ _ _ = runExceptT $
+  throw400 NotSupported "Cannot create table event triggers in MSSQL sources"
+
+buildEventTriggerInfo
+  :: MonadError QErr m
+  => Env.Environment
+  -> SourceName
+  -> TableName 'MSSQL
+  -> EventTriggerConf
+  -> m (EventTriggerInfo 'MSSQL, [SchemaDependency])
+buildEventTriggerInfo _ _ _ _ =
+  throw400 NotSupported "Table event triggers are not supported for MSSQL sources"
+
+buildFunctionInfo
+  :: (MonadError QErr m)
+  => SourceName
+  -> FunctionName 'MSSQL
+  -> SystemDefined
+  -> FunctionConfig
+  -> [FunctionPermissionMetadata]
+  -> RawFunctionInfo
+  -> m (FunctionInfo 'MSSQL, SchemaDependency)
+buildFunctionInfo _ _ _ _ _ _ =
+  throw400 NotSupported "SQL Functions are not supported for MSSQL source"
+
+updateColumnInEventTrigger
+  :: TableName 'MSSQL
+  -> Column 'MSSQL
+  -> Column 'MSSQL
+  -> TableName 'MSSQL
+  -> EventTriggerConf
+  -> EventTriggerConf
+updateColumnInEventTrigger _ _ _ _ = id
+
+parseCollectableType
+  :: (MonadError QErr m)
+  => CollectableType (ColumnType 'MSSQL)
+  -> Value
+  -> m (PartialSQLExp 'MSSQL)
+parseCollectableType collectableType = \case
+  String t
+    | isSessionVariable t -> pure $ mkTypedSessionVar collectableType $ mkSessionVariable t
+    | isReqUserId t -> pure $ mkTypedSessionVar collectableType userIdHeader
+  val -> case collectableType of
+    CollectableTypeScalar scalarType ->
+      PSESQLExp . MT.ValueExpression <$> parseScalarValueColumnType scalarType val
+    CollectableTypeArray _ ->
+      throw400 NotSupported "Array types are not supported in MSSQL backend"
+
+mkTypedSessionVar
+  :: CollectableType (ColumnType 'MSSQL)
+  -> SessionVariable -> PartialSQLExp 'MSSQL
+mkTypedSessionVar columnType =
+  PSESessVar (msColumnTypeToScalarType <$> columnType)
+
+msColumnTypeToScalarType :: ColumnType 'MSSQL -> ScalarType 'MSSQL
+msColumnTypeToScalarType = \case
+  ColumnScalar scalarType -> scalarType
+  ColumnEnumReference _   -> MT.TextType
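Everything in this module except `parseCollectableType` is a stub that rejects the feature. The interesting dispatch is: a JSON string that names a session variable becomes a session-variable expression, and anything else is parsed as a literal of the column's type. A standalone sketch of that dispatch (illustrative only; it assumes `isSessionVariable` checks for the "x-hasura-" prefix case-insensitively):

{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Aeson as J
import           Data.Text (Text)
import qualified Data.Text as T

-- Stand-in for PartialSQLExp: either a session variable or a literal value.
data Exp = SessionVar Text | Literal J.Value
  deriving (Show)

parseValue :: J.Value -> Exp
parseValue (J.String t)
  | "x-hasura-" `T.isPrefixOf` T.toLower t = SessionVar t  -- e.g. "X-Hasura-User-Id"
parseValue v = Literal v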
server/src-lib/Hasura/Backends/MSSQL/DDL/BoolExp.hs (new file, 67 lines)
@@ -0,0 +1,67 @@
+module Hasura.Backends.MSSQL.DDL.BoolExp where
+
+import Hasura.Prelude
+
+import qualified Data.Aeson as J
+import qualified Data.HashMap.Strict as Map
+
+import Hasura.Backends.MSSQL.Instances.Types ()
+import Hasura.Backends.MSSQL.Types hiding (ColumnType)
+import Hasura.RQL.IR.BoolExp
+import Hasura.RQL.Types.Column
+import Hasura.RQL.Types.Error
+import Hasura.RQL.Types.SchemaCache
+import Hasura.SQL.Backend
+import Hasura.SQL.Types
+
+parseBoolExpOperations
+  :: forall m v
+   . (MonadError QErr m) -- , TableCoreInfoRM 'MSSQL m)
+  => ValueParser 'MSSQL m v
+  -> FieldInfoMap (FieldInfo 'MSSQL)
+  -> ColumnInfo 'MSSQL
+  -> J.Value
+  -> m [OpExpG 'MSSQL v]
+parseBoolExpOperations rhsParser _fields columnInfo value =
+  withPathK (columnNameText $ pgiColumn columnInfo) $
+    parseOperations (pgiType columnInfo) value
+  where
+    parseWithTy ty = rhsParser (CollectableTypeScalar ty)
+
+    parseOperations :: ColumnType 'MSSQL -> J.Value -> m [OpExpG 'MSSQL v]
+    parseOperations columnType = \case
+      J.Object o -> mapM (parseOperation columnType) $ Map.toList o
+      v          -> pure . AEQ False <$> parseWithTy columnType v
+
+    parseOperation :: ColumnType 'MSSQL -> (Text, J.Value) -> m (OpExpG 'MSSQL v)
+    parseOperation columnType (opStr, val) = withPathK opStr $
+      case opStr of
+        "_eq"  -> parseEq
+        "$eq"  -> parseEq
+
+        "_neq" -> parseNeq
+        "$neq" -> parseNeq
+
+        "_gt"  -> parseGt
+        "$gt"  -> parseGt
+
+        "_lt"  -> parseLt
+        "$lt"  -> parseLt
+
+        "_gte" -> parseGte
+        "$gte" -> parseGte
+
+        "_lte" -> parseLte
+        "$lte" -> parseLte
+
+        x      -> throw400 UnexpectedPayload $ "Unknown operator : " <> x
+
+      where
+        parseOne = parseWithTy columnType val
+
+        parseEq  = AEQ False <$> parseOne
+        parseNeq = ANE False <$> parseOne
+        parseGt  = AGT <$> parseOne
+        parseLt  = ALT <$> parseOne
+        parseGte = AGTE <$> parseOne
+        parseLte = ALTE <$> parseOne
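The parser above is a plain dispatch table from operator key to OpExpG constructor, with a bare JSON value treated as shorthand for equality. A standalone sketch of the same shape (illustrative only: a toy Op type stands in for OpExpG, and objects use the aeson-1.x HashMap representation this commit builds against):

{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map  -- aeson < 2 represents objects as HashMap
import           Data.Text (Text)

data Op = Eq J.Value | Neq J.Value | Gt J.Value | Lt J.Value
  deriving (Show)

parseOps :: J.Value -> Either Text [Op]
parseOps (J.Object o) = traverse parseOp (Map.toList o)
  where
    parseOp (k, v) = case k of
      "_eq"  -> Right (Eq v)
      "_neq" -> Right (Neq v)
      "_gt"  -> Right (Gt v)
      "_lt"  -> Right (Lt v)
      x      -> Left ("Unknown operator : " <> x)
parseOps v = Right [Eq v]  -- a bare value is shorthand for equality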
server/src-lib/Hasura/Backends/MSSQL/DDL/RunSQL.hs (new file, 37 lines)
@@ -0,0 +1,37 @@
+module Hasura.Backends.MSSQL.DDL.RunSQL
+  (runSQL)
+where
+
+import Hasura.Prelude
+
+import Control.Exception
+import Data.String (fromString)
+
+import qualified Data.Aeson as J
+import qualified Data.Text as T
+import qualified Database.ODBC.Internal as ODBC
+
+import Hasura.Backends.MSSQL.Result
+import Hasura.Backends.MSSQL.Types
+import Hasura.EncJSON
+import Hasura.RQL.DDL.Schema (RunSQLRes (..))
+import Hasura.RQL.Types
+
+
+runSQL
+  :: (MonadIO m, CacheRWM m, MonadError QErr m)
+  => MSSQLRunSQL -> m EncJSON
+runSQL (MSSQLRunSQL sqlText source) = do
+  connection <- _mscConnection <$> askSourceConfig source
+  resultsEither <- liftIO $ try $ ODBC.query connection $ fromString $ T.unpack sqlText
+  case resultsEither of
+    Left (e :: SomeException) -> throw400 Unexpected $ "unexpected exception while executing query: " <> tshow e
+    Right results -> pure $ encJFromJValue $ toResult results
+
+toResult :: [[(ODBC.Column, ODBC.Value)]] -> RunSQLRes
+toResult result = case result of
+  []            -> RunSQLRes "CommandOk" J.Null
+  (firstRow:_)  -> RunSQLRes "TuplesOk" $ J.toJSON $ toHeader firstRow : toRows result
+  where
+    toRows   = map $ map $ odbcValueToJValue . snd
+    toHeader = map $ J.String . ODBC.columnName . fst
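The shaping done by `toResult` is: an empty result list means a plain command acknowledgement, otherwise the first row supplies a header of column names followed by one row of values per result row. A standalone sketch of just that shaping, with (Text, Value) pairs standing in for ODBC's (Column, Value):

{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Aeson as J
import           Data.Text (Text)

type Row = [(Text, J.Value)]  -- stand-in for [(ODBC.Column, ODBC.Value)]

shape :: [Row] -> J.Value
shape []             = J.Null  -- no rows: the "CommandOk" case carries a null result
shape rows@(first:_) = J.toJSON (map (J.String . fst) first : map (map snd) rows)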
server/src-lib/Hasura/Backends/MSSQL/DDL/Source.hs (new file, 58 lines)
@@ -0,0 +1,58 @@
+module Hasura.Backends.MSSQL.DDL.Source
+  ( resolveSourceConfig
+  , resolveDatabaseMetadata
+  , postDropSourceHook
+  )
+where
+
+import Hasura.Prelude
+
+import Control.Exception
+
+import qualified Database.ODBC.SQLServer as ODBC
+
+import Hasura.Backends.MSSQL.Connection
+import Hasura.Backends.MSSQL.Meta
+import Hasura.Backends.MSSQL.Types
+import Hasura.RQL.Types.Common
+import Hasura.RQL.Types.Error
+import Hasura.RQL.Types.Source
+import Hasura.SQL.Backend
+
+resolveSourceConfig
+  :: (MonadIO m)
+  => SourceName
+  -> MSSQLConnConfiguration
+  -> m (Either QErr MSSQLSourceConfig)
+resolveSourceConfig _name config = runExceptT do
+  eitherResult <- liftIO $ try $ ODBC.connect connStringText
+  case eitherResult of
+    Left (e :: SomeException) ->
+      throw400 Unexpected $ "unexpected exception while connecting to database: " <> tshow e
+    Right conn ->
+      pure $ MSSQLSourceConfig connString conn
+  where
+    MSSQLConnConfiguration connInfo = config
+    connString = _mciConnectionString connInfo
+    connStringText = unMSSQLConnectionString connString
+
+resolveDatabaseMetadata
+  :: (MonadIO m)
+  => MSSQLSourceConfig
+  -> m (Either QErr (ResolvedSource 'MSSQL))
+resolveDatabaseMetadata config = runExceptT do
+  eitherResult <- liftIO $ try $ loadDBMetadata conn
+  case eitherResult of
+    Left (e :: SomeException) ->
+      throw400 Unexpected $ "unexpected exception while connecting to database: " <> tshow e
+    Right dbTablesMetadata -> do
+      pure $ ResolvedSource config dbTablesMetadata mempty mempty
+  where
+    MSSQLSourceConfig _connString conn = config
+
+postDropSourceHook
+  :: (MonadIO m)
+  => MSSQLSourceConfig -> m ()
+postDropSourceHook (MSSQLSourceConfig _ conn) =
+  -- Close the connection
+  ODBC.close conn
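Both resolvers above follow one error-handling pattern: run the IO action under `try`, and convert any `SomeException` into a domain error rather than letting it escape. A standalone sketch of the pattern, with String errors standing in for QErr:

{-# LANGUAGE ScopedTypeVariables #-}

import Control.Exception (SomeException, try)
import Control.Monad.Trans.Except (ExceptT (..))

-- Run an IO action, turning any exception into a Left instead of a crash.
safely :: IO a -> ExceptT String IO a
safely action = ExceptT $ do
  result <- try action
  pure $ case result of
    Left (e :: SomeException) -> Left ("unexpected exception: " <> show e)
    Right a                   -> Right a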
server/src-lib/Hasura/Backends/MSSQL/FromIr.hs (new file, 960 lines)
@@ -0,0 +1,960 @@
+-- | Translate from the DML to the TSql dialect.
+
+module Hasura.Backends.MSSQL.FromIr
+  ( fromSelectRows
+  , mkSQLSelect
+  , fromRootField
+  , fromSelectAggregate
+  , fromAnnBoolExp
+  , Error(..)
+  , runFromIr
+  , FromIr
+  , jsonFieldName
+  , fromDelete
+  ) where
+
+import Hasura.Prelude
+
+import qualified Data.HashMap.Strict as HM
+import qualified Data.Map.Strict as M
+import qualified Data.Text as T
+import qualified Database.ODBC.SQLServer as ODBC
+
+--import Control.Monad.Trans.State.Strict as S
+import Control.Monad.Validate
+import Data.Map.Strict (Map)
+import Data.Proxy
+
+import qualified Hasura.GraphQL.Context as GraphQL
+import qualified Hasura.RQL.IR.BoolExp as IR
+import qualified Hasura.RQL.IR.Delete as IR
+import qualified Hasura.RQL.IR.OrderBy as IR
+import qualified Hasura.RQL.IR.Select as IR
+import qualified Hasura.RQL.Types.Column as IR
+import qualified Hasura.RQL.Types.Common as IR
+import qualified Hasura.RQL.Types.Relationship as IR
+
+import Hasura.Backends.MSSQL.Instances.Types ()
+import Hasura.Backends.MSSQL.Types as TSQL
+import Hasura.SQL.Backend
+
+
+--------------------------------------------------------------------------------
+-- Types
+
+-- | Most of these errors should be checked for legitimacy.
+data Error
+  = UnsupportedOpExpG (IR.OpExpG 'MSSQL Expression)
+  | FunctionNotSupported
+  deriving (Show, Eq)
+
+-- | The base monad used throughout this module for all conversion
+-- functions.
+--
+-- It's a Validate, so it'll continue going when it encounters errors
+-- to accumulate as many as possible.
+--
+-- It also contains a mapping from entity prefixes to counters. So if
+-- my prefix is "table" then there'll be a counter that lets me
+-- generate table1, table2, etc. Same for any other prefix needed
+-- (e.g. names for joins).
+--
+-- A ReaderT is used around this in most of the module too, for
+-- setting the current entity that a given field name refers to. See
+-- @fromPGCol@.
+newtype FromIr a = FromIr
+  { unFromIr :: StateT (Map Text Int) (Validate (NonEmpty Error)) a
+  } deriving (Functor, Applicative, Monad, MonadValidate (NonEmpty Error))
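As an aside, here is a standalone sketch of the prefix-counter idea the comment above describes (illustrative only; the commit's own helper is `generateEntityAlias` further down this module, which runs in FromIr rather than plain State):

import           Control.Monad.State.Strict (State, evalState, state)
import qualified Data.Map.Strict as M
import           Data.Text (Text)
import qualified Data.Text as T

-- Bump the counter for a prefix and render e.g. "table1", "table2", ...
generateAlias :: Text -> State (M.Map Text Int) Text
generateAlias prefix = state $ \counters ->
  let next = M.findWithDefault 0 prefix counters + 1
  in (prefix <> T.pack (show next), M.insert prefix next counters)

-- evalState (traverse generateAlias ["table", "table", "join"]) M.empty
--   == ["table1", "table2", "join1"]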
+data StringifyNumbers
+  = StringifyNumbers
+  | LeaveNumbersAlone
+  deriving (Eq)
+
+
+--------------------------------------------------------------------------------
+-- Runners
+
+runFromIr :: FromIr a -> Validate (NonEmpty Error) a
+runFromIr fromIr = evalStateT (unFromIr fromIr) mempty
+
+
+--------------------------------------------------------------------------------
+-- Similar rendition of old API
+
+mkSQLSelect ::
+     IR.JsonAggSelect
+  -> IR.AnnSelectG 'MSSQL (IR.AnnFieldsG 'MSSQL Expression) Expression
+  -> FromIr TSQL.Select
+mkSQLSelect jsonAggSelect annSimpleSel =
+  case jsonAggSelect of
+    IR.JASMultipleRows -> fromSelectRows annSimpleSel
+    IR.JASSingleObject -> do
+      select <- fromSelectRows annSimpleSel
+      pure
+        select
+          { selectFor =
+              JsonFor
+                ForJson {jsonCardinality = JsonSingleton, jsonRoot = NoRoot}
+          , selectTop = Top 1
+          }
+
+-- | Convert from the IR database query into a select.
+fromRootField :: GraphQL.QueryDB 'MSSQL Expression -> FromIr Select
+fromRootField =
+  \case
+    (GraphQL.QDBSingleRow s)    -> mkSQLSelect IR.JASSingleObject s
+    (GraphQL.QDBMultipleRows s) -> mkSQLSelect IR.JASMultipleRows s
+    (GraphQL.QDBAggregation s)  -> fromSelectAggregate s
+
+--------------------------------------------------------------------------------
+-- Top-level exported functions
+
+fromSelectRows :: IR.AnnSelectG 'MSSQL (IR.AnnFieldsG 'MSSQL Expression) Expression -> FromIr TSQL.Select
+fromSelectRows annSelectG = do
+  selectFrom <-
+    case from of
+      IR.FromTable qualifiedObject -> fromQualifiedTable qualifiedObject
+      IR.FromFunction _ _ _        -> refute $ pure FunctionNotSupported
+  Args { argsOrderBy
+       , argsWhere
+       , argsJoins
+       , argsTop
+       , argsDistinct = Proxy
+       , argsOffset
+       , argsExistingJoins
+       } <- runReaderT (fromSelectArgsG args) (fromAlias selectFrom)
+  fieldSources <-
+    runReaderT
+      (traverse (fromAnnFieldsG argsExistingJoins stringifyNumbers) fields)
+      (fromAlias selectFrom)
+  filterExpression <-
+    runReaderT (fromAnnBoolExp permFilter) (fromAlias selectFrom)
+  let selectProjections =
+        concatMap (toList . fieldSourceProjections) fieldSources
+  pure
+    Select
+      { selectOrderBy = argsOrderBy
+      , selectTop = permissionBasedTop <> argsTop
+      , selectProjections
+      , selectFrom
+      , selectJoins = argsJoins <> mapMaybe fieldSourceJoin fieldSources
+      , selectWhere = argsWhere <> Where [filterExpression]
+      , selectFor =
+          JsonFor ForJson {jsonCardinality = JsonArray, jsonRoot = NoRoot}
+      , selectOffset = argsOffset
+      }
+  where
+    IR.AnnSelectG { _asnFields = fields
+                  , _asnFrom = from
+                  , _asnPerm = perm
+                  , _asnArgs = args
+                  , _asnStrfyNum = num
+                  } = annSelectG
+    IR.TablePerm {_tpLimit = mPermLimit, _tpFilter = permFilter} = perm
+    permissionBasedTop =
+      case mPermLimit of
+        Nothing    -> NoTop
+        Just limit -> Top limit
+    stringifyNumbers =
+      if num
+        then StringifyNumbers
+        else LeaveNumbersAlone
+
+fromSelectAggregate ::
+     IR.AnnSelectG 'MSSQL [(IR.FieldName, IR.TableAggregateFieldG 'MSSQL Expression)] Expression
+  -> FromIr TSQL.Select
+fromSelectAggregate annSelectG = do
+  selectFrom <-
+    case from of
+      IR.FromTable qualifiedObject -> fromQualifiedTable qualifiedObject
+      IR.FromFunction _ _ _        -> refute $ pure FunctionNotSupported
+  fieldSources <-
+    runReaderT (traverse fromTableAggregateFieldG fields) (fromAlias selectFrom)
+  filterExpression <-
+    runReaderT (fromAnnBoolExp permFilter) (fromAlias selectFrom)
+  Args { argsOrderBy
+       , argsWhere
+       , argsJoins
+       , argsTop
+       , argsDistinct = Proxy
+       , argsOffset
+       } <- runReaderT (fromSelectArgsG args) (fromAlias selectFrom)
+  let selectProjections =
+        concatMap (toList . fieldSourceProjections) fieldSources
+  pure
+    Select
+      { selectProjections
+      , selectTop = permissionBasedTop <> argsTop
+      , selectFrom
+      , selectJoins = argsJoins <> mapMaybe fieldSourceJoin fieldSources
+      , selectWhere = argsWhere <> Where [filterExpression]
+      , selectFor =
+          JsonFor ForJson {jsonCardinality = JsonSingleton, jsonRoot = NoRoot}
+      , selectOrderBy = argsOrderBy
+      , selectOffset = argsOffset
+      }
+  where
+    IR.AnnSelectG { _asnFields = fields
+                  , _asnFrom = from
+                  , _asnPerm = perm
+                  , _asnArgs = args
+                  , _asnStrfyNum = _num -- TODO: Do we ignore this for aggregates?
+                  } = annSelectG
+    IR.TablePerm {_tpLimit = mPermLimit, _tpFilter = permFilter} = perm
+    permissionBasedTop =
+      case mPermLimit of
+        Nothing    -> NoTop
+        Just limit -> Top limit
+
+
+--------------------------------------------------------------------------------
+-- GraphQL Args
+
+data Args = Args
+  { argsWhere :: Where
+  , argsOrderBy :: Maybe (NonEmpty OrderBy)
+  , argsJoins :: [Join]
+  , argsTop :: Top
+  , argsOffset :: Maybe Expression
+  , argsDistinct :: Proxy (Maybe (NonEmpty FieldName))
+  , argsExistingJoins :: Map TableName EntityAlias
+  } deriving (Show)
+
+data UnfurledJoin = UnfurledJoin
+  { unfurledJoin :: Join
+  , unfurledObjectTableAlias :: Maybe (TableName, EntityAlias)
+    -- ^ Recorded if we joined onto an object relation.
+  } deriving (Show)
+
+fromSelectArgsG :: IR.SelectArgsG 'MSSQL Expression -> ReaderT EntityAlias FromIr Args
+fromSelectArgsG selectArgsG = do
+  argsWhere <-
+    maybe (pure mempty) (fmap (Where . pure) . fromAnnBoolExp) mannBoolExp
+  argsTop <- maybe (pure mempty) (pure . Top) mlimit
+  argsOffset <-
+    maybe (pure Nothing) (fmap Just . lift . fromSQLExpAsInt) moffset
+  -- Not supported presently, per Vamshi:
+  --
+  -- > It is hardly used and we don't have to go to great lengths to support it.
+  --
+  -- But placeholdering the code so that when it's ready to be used,
+  -- you can just drop the Proxy wrapper.
+  argsDistinct <-
+    case mdistinct of
+      Nothing     -> pure Proxy
+      Just (x, _) -> case x of {}
+  (argsOrderBy, joins) <-
+    runWriterT (traverse fromAnnOrderByItemG (maybe [] toList orders))
+  -- Any object-relation joins that we generated, we record their
+  -- generated names into a mapping.
+  let argsExistingJoins =
+        M.fromList (mapMaybe unfurledObjectTableAlias (toList joins))
+  pure
+    Args
+      { argsJoins = toList (fmap unfurledJoin joins)
+      , argsOrderBy = nonEmpty argsOrderBy
+      , ..
+      }
+  where
+    IR.SelectArgs { _saWhere = mannBoolExp
+                  , _saLimit = mlimit
+                  , _saOffset = moffset
+                  , _saDistinct = mdistinct
+                  , _saOrderBy = orders
+                  } = selectArgsG
+
+-- | Produce a valid ORDER BY construct, telling about any joins
+-- needed on the side.
+fromAnnOrderByItemG ::
+     IR.AnnOrderByItemG 'MSSQL Expression -> WriterT (Seq UnfurledJoin) (ReaderT EntityAlias FromIr) OrderBy
+fromAnnOrderByItemG IR.OrderByItemG {obiType, obiColumn, obiNulls} = do
+  orderByFieldName <- unfurlAnnOrderByElement obiColumn
+  let orderByNullsOrder = fromMaybe NullsAnyOrder obiNulls
+      orderByOrder = fromMaybe AscOrder obiType
+  pure OrderBy {..}
+
+-- | Unfurl the nested set of object relations (tell'd in the writer)
+-- that are terminated by field name (IR.AOCColumn and
+-- IR.AOCArrayAggregation).
+unfurlAnnOrderByElement ::
+     IR.AnnOrderByElement 'MSSQL Expression -> WriterT (Seq UnfurledJoin) (ReaderT EntityAlias FromIr) FieldName
+unfurlAnnOrderByElement =
+  \case
+    IR.AOCColumn pgColumnInfo ->
+      lift (fromPGColumnInfo pgColumnInfo)
+    IR.AOCObjectRelation IR.RelInfo {riMapping = mapping, riRTable = table} annBoolExp annOrderByElementG -> do
+      selectFrom <- lift (lift (fromQualifiedTable table))
+      joinAliasEntity <-
+        lift (lift (generateEntityAlias (ForOrderAlias (tableNameText table))))
+      foreignKeyConditions <- lift (fromMapping selectFrom mapping)
+      -- TODO: Because these object relations are re-used by regular
+      -- object mapping queries, this WHERE may be unnecessarily
+      -- restrictive. But I actually don't know from where such an
+      -- expression arises in the source GraphQL syntax.
+      --
+      -- Worst case scenario, we could put the WHERE in the key of the
+      -- Map in 'argsExistingJoins'. That would guarantee only equal
+      -- selects are re-used.
+      whereExpression <-
+        lift (local (const (fromAlias selectFrom)) (fromAnnBoolExp annBoolExp))
+      tell
+        (pure
+           UnfurledJoin
+             { unfurledJoin =
+                 Join
+                   { joinSource =
+                       JoinSelect
+                         Select
+                           { selectTop = NoTop
+                           , selectProjections = [StarProjection]
+                           , selectFrom
|
||||||
|
, selectJoins = []
|
||||||
|
, selectWhere =
|
||||||
|
Where (foreignKeyConditions <> [whereExpression])
|
||||||
|
, selectFor = NoFor
|
||||||
|
, selectOrderBy = Nothing
|
||||||
|
, selectOffset = Nothing
|
||||||
|
}
|
||||||
|
, joinJoinAlias =
|
||||||
|
JoinAlias {joinAliasEntity, joinAliasField = Nothing}
|
||||||
|
}
|
||||||
|
, unfurledObjectTableAlias = Just (table, EntityAlias joinAliasEntity)
|
||||||
|
})
|
||||||
|
local
|
||||||
|
(const (EntityAlias joinAliasEntity))
|
||||||
|
(unfurlAnnOrderByElement annOrderByElementG)
|
||||||
|
IR.AOCArrayAggregation IR.RelInfo {riMapping = mapping, riRTable = table} annBoolExp annAggregateOrderBy -> do
|
||||||
|
selectFrom <- lift (lift (fromQualifiedTable table))
|
||||||
|
let alias = aggFieldName
|
||||||
|
joinAliasEntity <-
|
||||||
|
lift (lift (generateEntityAlias (ForOrderAlias (tableNameText table))))
|
||||||
|
foreignKeyConditions <- lift (fromMapping selectFrom mapping)
|
||||||
|
whereExpression <-
|
||||||
|
lift (local (const (fromAlias selectFrom)) (fromAnnBoolExp annBoolExp))
|
||||||
|
aggregate <-
|
||||||
|
lift
|
||||||
|
(local
|
||||||
|
(const (fromAlias selectFrom))
|
||||||
|
(case annAggregateOrderBy of
|
||||||
|
IR.AAOCount -> pure (CountAggregate StarCountable)
|
||||||
|
IR.AAOOp text pgColumnInfo -> do
|
||||||
|
fieldName <- fromPGColumnInfo pgColumnInfo
|
||||||
|
pure (OpAggregate text (pure (ColumnExpression fieldName)))))
|
||||||
|
tell
|
||||||
|
(pure
|
||||||
|
(UnfurledJoin
|
||||||
|
{ unfurledJoin =
|
||||||
|
Join
|
||||||
|
{ joinSource =
|
||||||
|
JoinSelect
|
||||||
|
Select
|
||||||
|
{ selectTop = NoTop
|
||||||
|
, selectProjections =
|
||||||
|
[ AggregateProjection
|
||||||
|
Aliased
|
||||||
|
{ aliasedThing = aggregate
|
||||||
|
, aliasedAlias = alias
|
||||||
|
}
|
||||||
|
]
|
||||||
|
, selectFrom
|
||||||
|
, selectJoins = []
|
||||||
|
, selectWhere =
|
||||||
|
Where
|
||||||
|
(foreignKeyConditions <> [whereExpression])
|
||||||
|
, selectFor = NoFor
|
||||||
|
, selectOrderBy = Nothing
|
||||||
|
, selectOffset = Nothing
|
||||||
|
}
|
||||||
|
, joinJoinAlias =
|
||||||
|
JoinAlias {joinAliasEntity, joinAliasField = Nothing}
|
||||||
|
}
|
||||||
|
, unfurledObjectTableAlias = Nothing
|
||||||
|
}))
|
||||||
|
pure FieldName {fieldNameEntity = joinAliasEntity, fieldName = alias}
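
-- A worked example of the unfurling above (table and column names are
-- hypothetical): ordering posts by their author's name, roughly
--
-- > order_by: {author: {name: asc}}
--
-- hits the 'IR.AOCObjectRelation' case, which joins onto the author
-- table under a fresh alias (via 'ForOrderAlias'), records that alias
-- in the writer so 'fromSelectArgsG' can re-use it, and then recurses
-- with the new alias in scope until it bottoms out at an
-- 'IR.AOCColumn', yielding a field name like [order_author1].[name].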


--------------------------------------------------------------------------------
-- Conversion functions

tableNameText :: {-PG.QualifiedObject-} TableName -> Text
tableNameText (TableName {tableName}) = tableName
-- tableNameText qualifiedObject = qname
--   where
--     PG.QualifiedObject {qName = PG.TableName qname} = qualifiedObject

-- | This is really the start, where you query the base table;
-- everything else is joins attached to it.
fromQualifiedTable :: TableName -> FromIr From
fromQualifiedTable schemadTableName@(TableName{tableName}) = do
  alias <- generateEntityAlias (TableTemplate tableName)
  pure
    (FromQualifiedTable
       (Aliased
          { aliasedThing =
              schemadTableName {-TableName {tableName = qname, tableNameSchema = schemaName}-}
          , aliasedAlias = alias
          }))
  -- where
  --   PG.QualifiedObject { qSchema = PG.SchemaName schemaName
  --                      -- TODO: Consider many x.y.z. in schema name.
  --                      , qName = PG.TableName qname
  --                      } = qualifiedObject

fromTableName :: TableName -> FromIr EntityAlias
fromTableName TableName{tableName} = do
  alias <- generateEntityAlias (TableTemplate tableName)
  pure (EntityAlias alias)

fromAnnBoolExp ::
     IR.GBoolExp 'MSSQL (IR.AnnBoolExpFld 'MSSQL Expression)
  -> ReaderT EntityAlias FromIr Expression
fromAnnBoolExp = traverse fromAnnBoolExpFld >=> fromGBoolExp

fromAnnBoolExpFld ::
     IR.AnnBoolExpFld 'MSSQL Expression -> ReaderT EntityAlias FromIr Expression
fromAnnBoolExpFld =
  \case
    IR.AVCol pgColumnInfo opExpGs -> do
      expression <- fmap ColumnExpression (fromPGColumnInfo pgColumnInfo)
      expressions <- traverse (lift . fromOpExpG expression) opExpGs
      pure (AndExpression expressions)
    IR.AVRel IR.RelInfo {riMapping = mapping, riRTable = table} annBoolExp -> do
      selectFrom <- lift (fromQualifiedTable table)
      foreignKeyConditions <- fromMapping selectFrom mapping
      whereExpression <-
        local (const (fromAlias selectFrom)) (fromAnnBoolExp annBoolExp)
      pure
        (ExistsExpression
           Select
             { selectOrderBy = Nothing
             , selectProjections =
                 [ ExpressionProjection
                     (Aliased
                        { aliasedThing = trueExpression
                        , aliasedAlias = existsFieldName
                        })
                 ]
             , selectFrom
             , selectJoins = mempty
             , selectWhere = Where (foreignKeyConditions <> [whereExpression])
             , selectTop = NoTop
             , selectFor = NoFor
             , selectOffset = Nothing
             })

fromPGColumnInfo :: IR.ColumnInfo 'MSSQL -> ReaderT EntityAlias FromIr FieldName
fromPGColumnInfo IR.ColumnInfo {pgiColumn = pgCol} =
  columnNameToFieldName pgCol <$> ask
  -- entityAlias <- ask
  -- pure
  --   (columnNameToFieldName pgCol entityAlias
  --    FieldName
  --      {fieldName = PG.getPGColTxt pgCol, fieldNameEntity = entityAliasText})

fromGExists :: IR.GExists 'MSSQL Expression -> ReaderT EntityAlias FromIr Select
fromGExists IR.GExists {_geTable, _geWhere} = do
  selectFrom <- lift (fromQualifiedTable _geTable)
  whereExpression <-
    local (const (fromAlias selectFrom)) (fromGBoolExp _geWhere)
  pure
    Select
      { selectOrderBy = Nothing
      , selectProjections =
          [ ExpressionProjection
              (Aliased
                 { aliasedThing = trueExpression
                 , aliasedAlias = existsFieldName
                 })
          ]
      , selectFrom
      , selectJoins = mempty
      , selectWhere = Where [whereExpression]
      , selectTop = NoTop
      , selectFor = NoFor
      , selectOffset = Nothing
      }


--------------------------------------------------------------------------------
-- Sources of projected fields
--
-- Because in the IR, a field projected can be a foreign object, we
-- have to both generate a projection AND on the side generate a join.
--
-- So a @FieldSource@ couples the idea of the projected thing and the
-- source of it (via 'Aliased').

data FieldSource
  = ExpressionFieldSource (Aliased Expression)
  | JoinFieldSource (Aliased Join)
  | AggregateFieldSource [Aliased Aggregate]
  deriving (Eq, Show)

fromTableAggregateFieldG ::
     (IR.FieldName, IR.TableAggregateFieldG 'MSSQL Expression) -> ReaderT EntityAlias FromIr FieldSource
fromTableAggregateFieldG (IR.FieldName name, field) =
  case field of
    IR.TAFAgg (aggregateFields :: [(IR.FieldName, IR.AggregateField 'MSSQL)]) -> do
      aggregates <-
        for aggregateFields \(fieldName, aggregateField) ->
          fromAggregateField aggregateField <&> \aliasedThing ->
            Aliased {aliasedAlias = IR.getFieldNameTxt fieldName, ..}
      pure (AggregateFieldSource aggregates)
    IR.TAFExp text ->
      pure
        (ExpressionFieldSource
           Aliased
             { aliasedThing = TSQL.ValueExpression (ODBC.TextValue text)
             , aliasedAlias = name
             })
    IR.TAFNodes x _ -> case x of {}

fromAggregateField :: IR.AggregateField 'MSSQL -> ReaderT EntityAlias FromIr Aggregate
fromAggregateField aggregateField =
  case aggregateField of
    IR.AFExp text -> pure (TextAggregate text)
    IR.AFCount countType -> CountAggregate <$> case countType of
      StarCountable -> pure StarCountable
      NonNullFieldCountable names -> NonNullFieldCountable <$> traverse fromPGCol names
      DistinctCountable names -> DistinctCountable <$> traverse fromPGCol names
      -- fmap
      --   CountAggregate
      --   (pure countType
      --    case countType of
      --      PG.CTStar -> pure StarCountable
      --      PG.CTSimple fields ->
      --        case nonEmpty fields of
      --          Nothing -> refute (pure MalformedAgg)
      --          Just fields' -> do
      --            fields'' <- traverse fromPGCol fields'
      --            pure (NonNullFieldCountable fields'')
      --      PG.CTDistinct fields ->
      --        case nonEmpty fields of
      --          Nothing -> refute (pure MalformedAgg)
      --          Just fields' -> do
      --            fields'' <- traverse fromPGCol fields'
      --            pure (DistinctCountable fields''))
    IR.AFOp IR.AggregateOp {_aoOp = op, _aoFields = fields} -> do
      args <- for fields \(_fieldName, pgColFld) ->
        case pgColFld of
          IR.CFCol pgCol _pgType -> fmap ColumnExpression (fromPGCol pgCol)
          IR.CFExp text -> pure (ValueExpression (ODBC.TextValue text))
      pure (OpAggregate op args)

-- | The main sources of fields: either constants, columns, or joins.
fromAnnFieldsG ::
     Map TableName EntityAlias
  -> StringifyNumbers
  -> (IR.FieldName, IR.AnnFieldG 'MSSQL Expression)
  -> ReaderT EntityAlias FromIr FieldSource
fromAnnFieldsG existingJoins stringifyNumbers (IR.FieldName name, field) =
  case field of
    IR.AFColumn annColumnField -> do
      expression <- fromAnnColumnField stringifyNumbers annColumnField
      pure
        (ExpressionFieldSource
           Aliased {aliasedThing = expression, aliasedAlias = name})
    IR.AFExpression text ->
      pure
        (ExpressionFieldSource
           Aliased
             { aliasedThing = TSQL.ValueExpression (ODBC.TextValue text)
             , aliasedAlias = name
             })
    IR.AFObjectRelation objectRelationSelectG ->
      fmap
        (\aliasedThing ->
           JoinFieldSource (Aliased {aliasedThing, aliasedAlias = name}))
        (fromObjectRelationSelectG existingJoins objectRelationSelectG)
    IR.AFArrayRelation arraySelectG ->
      fmap
        (\aliasedThing ->
           JoinFieldSource (Aliased {aliasedThing, aliasedAlias = name}))
        (fromArraySelectG arraySelectG)
    -- TODO:
    -- Vamshi said to ignore these three for now:
    IR.AFNodeId x _ _ -> case x of {}
    IR.AFRemote x _ -> case x of {}
    IR.AFComputedField x _ -> case x of {}

-- | Here is where we project a field as a column expression. If
-- number stringification is on, then we wrap it in a
-- 'ToStringExpression' so that it's casted when being projected.
fromAnnColumnField ::
     StringifyNumbers
  -> IR.AnnColumnField 'MSSQL
  -> ReaderT EntityAlias FromIr Expression
fromAnnColumnField _stringifyNumbers annColumnField = do
  fieldName <- fromPGCol pgCol
  if asText || True -- TODO: FIXME:
     -- TODO: Does MSSQL support bignums? Probably, but needs researching.
     {-(IR.isScalarColumnWhere PG.isBigNum typ && stringifyNumbers == StringifyNumbers)-}
    then pure (ToStringExpression (ColumnExpression fieldName))
    else pure (ColumnExpression fieldName)
  where
    IR.AnnColumnField { _acfInfo = IR.ColumnInfo{pgiColumn=pgCol,pgiType=_typ}
                      , _acfAsText = asText :: Bool
                      , _acfOp = _ :: Maybe (IR.ColumnOp 'MSSQL) -- TODO: What's this?
                      } = annColumnField

-- | This is where a field name "foo" is resolved to a fully qualified
-- field name [table].[foo]. The table name comes from EntityAlias in
-- the ReaderT.
fromPGCol :: ColumnName -> ReaderT EntityAlias FromIr FieldName
fromPGCol pgCol = columnNameToFieldName pgCol <$> ask
  -- entityAlias <- ask
  -- pure (columnNameToFieldName pgCol entityAlias -- FieldName {fieldName = PG.getPGColTxt pgCol, fieldNameEntity = entityAliasText}
  --     )
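
-- For instance (the alias is illustrative): with an EntityAlias of
-- "t_artist1" in the reader, @fromPGCol (ColumnName "name")@ resolves
-- to @FieldName {fieldNameEntity = "t_artist1", fieldName = "name"}@,
-- which renders as [t_artist1].[name].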

fieldSourceProjections :: FieldSource -> [Projection]
fieldSourceProjections =
  \case
    ExpressionFieldSource aliasedExpression ->
      pure (ExpressionProjection aliasedExpression)
    JoinFieldSource aliasedJoin ->
      pure
        (ExpressionProjection
           (aliasedJoin
              { aliasedThing =
                  -- Basically a cast, to ensure that SQL Server won't
                  -- double-encode the JSON but will "pass it through"
                  -- untouched.
                  JsonQueryExpression
                    (ColumnExpression
                       (joinAliasToField
                          (joinJoinAlias (aliasedThing aliasedJoin))))
              }))
    AggregateFieldSource aggregates -> fmap AggregateProjection aggregates

joinAliasToField :: JoinAlias -> FieldName
joinAliasToField JoinAlias {..} =
  FieldName
    { fieldNameEntity = joinAliasEntity
    , fieldName = fromMaybe (error "TODO: Eliminate this case. joinAliasToField") joinAliasField
    }

fieldSourceJoin :: FieldSource -> Maybe Join
fieldSourceJoin =
  \case
    JoinFieldSource aliasedJoin -> pure (aliasedThing aliasedJoin)
    ExpressionFieldSource {} -> Nothing
    AggregateFieldSource {} -> Nothing


--------------------------------------------------------------------------------
-- Joins

fromObjectRelationSelectG ::
     Map TableName {-PG.QualifiedTable-} EntityAlias
  -> IR.ObjectRelationSelectG 'MSSQL Expression
  -> ReaderT EntityAlias FromIr Join
fromObjectRelationSelectG existingJoins annRelationSelectG = do
  eitherAliasOrFrom <- lift (lookupTableFrom existingJoins tableFrom)
  let entityAlias :: EntityAlias = either id fromAlias eitherAliasOrFrom
  fieldSources <-
    local
      (const entityAlias)
      (traverse (fromAnnFieldsG mempty LeaveNumbersAlone) fields)
  let selectProjections =
        concatMap (toList . fieldSourceProjections) fieldSources
  joinJoinAlias <-
    do fieldName <- lift (fromRelName aarRelationshipName)
       alias <- lift (generateEntityAlias (ObjectRelationTemplate fieldName))
       pure
         JoinAlias
           {joinAliasEntity = alias, joinAliasField = pure jsonFieldName}
  let selectFor =
        JsonFor ForJson {jsonCardinality = JsonSingleton, jsonRoot = NoRoot}
  filterExpression <- local (const entityAlias) (fromAnnBoolExp tableFilter)
  case eitherAliasOrFrom of
    Right selectFrom -> do
      foreignKeyConditions <- fromMapping selectFrom mapping
      pure
        Join
          { joinJoinAlias
          , joinSource =
              JoinSelect
                Select
                  { selectOrderBy = Nothing
                  , selectTop = NoTop
                  , selectProjections
                  , selectFrom
                  , selectJoins = mapMaybe fieldSourceJoin fieldSources
                  , selectWhere =
                      Where (foreignKeyConditions <> [filterExpression])
                  , selectFor
                  , selectOffset = Nothing
                  }
          }
    Left _entityAlias ->
      pure
        Join
          { joinJoinAlias
          , joinSource =
              JoinReselect
                Reselect
                  { reselectProjections = selectProjections
                  , reselectFor = selectFor
                  , reselectWhere = Where [filterExpression]
                  }
          }
  where
    IR.AnnObjectSelectG { _aosFields = fields :: IR.AnnFieldsG 'MSSQL Expression
                        , _aosTableFrom = tableFrom :: TableName{-PG.QualifiedTable-}
                        , _aosTableFilter = tableFilter :: IR.AnnBoolExp 'MSSQL Expression
                        } = annObjectSelectG
    IR.AnnRelationSelectG { aarRelationshipName
                          , aarColumnMapping = mapping :: HashMap ColumnName ColumnName -- PG.PGCol PG.PGCol
                          , aarAnnSelect = annObjectSelectG :: IR.AnnObjectSelectG 'MSSQL Expression
                          } = annRelationSelectG

lookupTableFrom ::
     Map TableName {-PG.QualifiedTable-} EntityAlias
  -> {-PG.QualifiedTable-}TableName
  -> FromIr (Either EntityAlias From)
lookupTableFrom existingJoins tableFrom = do
  case M.lookup tableFrom existingJoins of
    Just entityAlias -> pure (Left entityAlias)
    Nothing -> fmap Right (fromQualifiedTable tableFrom)

fromArraySelectG :: IR.ArraySelectG 'MSSQL Expression -> ReaderT EntityAlias FromIr Join
fromArraySelectG =
  \case
    IR.ASSimple arrayRelationSelectG ->
      fromArrayRelationSelectG arrayRelationSelectG
    IR.ASAggregate arrayAggregateSelectG ->
      fromArrayAggregateSelectG arrayAggregateSelectG

fromArrayAggregateSelectG ::
     IR.AnnRelationSelectG 'MSSQL (IR.AnnAggregateSelectG 'MSSQL Expression)
  -> ReaderT EntityAlias FromIr Join
fromArrayAggregateSelectG annRelationSelectG = do
  fieldName <- lift (fromRelName aarRelationshipName)
  select <- lift (fromSelectAggregate annSelectG)
  joinSelect <-
    do foreignKeyConditions <- fromMapping (selectFrom select) mapping
       pure
         select {selectWhere = Where foreignKeyConditions <> selectWhere select}
  alias <- lift (generateEntityAlias (ArrayAggregateTemplate fieldName))
  pure
    Join
      { joinJoinAlias =
          JoinAlias
            {joinAliasEntity = alias, joinAliasField = pure jsonFieldName}
      , joinSource = JoinSelect joinSelect
      }
  where
    IR.AnnRelationSelectG { aarRelationshipName
                          , aarColumnMapping = mapping :: HashMap ColumnName ColumnName -- PG.PGCol PG.PGCol
                          , aarAnnSelect = annSelectG
                          } = annRelationSelectG

fromArrayRelationSelectG :: IR.ArrayRelationSelectG 'MSSQL Expression -> ReaderT EntityAlias FromIr Join
fromArrayRelationSelectG annRelationSelectG = do
  fieldName <- lift (fromRelName aarRelationshipName)
  select <- lift (fromSelectRows annSelectG)
  joinSelect <-
    do foreignKeyConditions <- fromMapping (selectFrom select) mapping
       pure
         select {selectWhere = Where foreignKeyConditions <> selectWhere select}
  alias <- lift (generateEntityAlias (ArrayRelationTemplate fieldName))
  pure
    Join
      { joinJoinAlias =
          JoinAlias
            {joinAliasEntity = alias, joinAliasField = pure jsonFieldName}
      , joinSource = JoinSelect joinSelect
      }
  where
    IR.AnnRelationSelectG { aarRelationshipName
                          , aarColumnMapping = mapping :: HashMap ColumnName ColumnName -- PG.PGCol PG.PGCol
                          , aarAnnSelect = annSelectG
                          } = annRelationSelectG

fromRelName :: IR.RelName -> FromIr Text
fromRelName relName =
  pure (IR.relNameToTxt relName)

-- | The context given by the reader is of the previous/parent
-- "remote" table. The WHERE that we're generating goes in the child,
-- "local" query. The @From@ passed in as argument is the local table.
--
-- We should hope to see e.g. "post.category = category.id" for a
-- local table of post and a remote table of category.
--
-- The left/right columns in @HashMap PG.PGCol PG.PGCol@ correspond
-- to the left/right of @select ... join ...@. Therefore left=remote,
-- right=local in this context.
fromMapping ::
     From
  -> HashMap ColumnName ColumnName -- PG.PGCol PG.PGCol
  -> ReaderT EntityAlias FromIr [Expression]
fromMapping localFrom =
  traverse
    (\(remotePgCol, localPgCol) -> do
       localFieldName <- local (const (fromAlias localFrom)) (fromPGCol localPgCol)
       remoteFieldName <- fromPGCol remotePgCol
       pure
         (EqualExpression
            (ColumnExpression localFieldName)
            (ColumnExpression remoteFieldName))) .
  HM.toList
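
-- As a sketch (table and column names are illustrative): for a local
-- "post" select joined against a remote "category" parent, a mapping
-- entry of (remote "id", local "category") becomes roughly
--
-- > [t_post1].[category] = [t_category1].[id]
--
-- with the local alias taken from the @From@ argument and the remote
-- alias from the reader context; one equality is produced per mapping
-- entry.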


--------------------------------------------------------------------------------
-- Basic SQL expression types

fromOpExpG :: Expression -> IR.OpExpG 'MSSQL Expression -> FromIr Expression
fromOpExpG expression op =
  case op of
    IR.ANISNULL -> pure (IsNullExpression expression)
    IR.ANISNOTNULL -> pure (IsNotNullExpression expression)
    IR.AEQ False val -> pure (nullableBoolEquality expression val)
    IR.AEQ True val -> pure (EqualExpression expression val)
    IR.ANE False val -> pure (nullableBoolInequality expression val)
    IR.ANE True val -> pure (NotEqualExpression expression val)
    IR.AGT val -> pure (OpExpression MoreOp expression val)
    IR.ALT val -> pure (OpExpression LessOp expression val)
    IR.AGTE val -> pure (OpExpression MoreOrEqualOp expression val)
    IR.ALTE val -> pure (OpExpression LessOrEqualOp expression val)
    IR.ACast _casts -> refute (pure (UnsupportedOpExpG op)) -- mkCastsExp casts
    IR.AIN _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompareAny S.SEQ lhs val
    IR.ANIN _val -> refute (pure (UnsupportedOpExpG op)) -- S.BENot $ S.BECompareAny S.SEQ lhs val
    IR.ALIKE _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SLIKE lhs val
    IR.ANLIKE _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNLIKE lhs val
    IR.AILIKE _ _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SILIKE lhs val
    IR.ANILIKE _ _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNILIKE lhs val
    IR.ASIMILAR _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SSIMILAR lhs val
    IR.ANSIMILAR _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNSIMILAR lhs val
    IR.AREGEX _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNSIMILAR lhs val
    IR.AIREGEX _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNSIMILAR lhs val
    IR.ANREGEX _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNSIMILAR lhs val
    IR.ANIREGEX _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNSIMILAR lhs val
    IR.AContains _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SContains lhs val
    IR.AContainedIn _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SContainedIn lhs val
    IR.AHasKey _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SHasKey lhs val
    IR.AHasKeysAny _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SHasKeysAny lhs val
    IR.AHasKeysAll _val -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SHasKeysAll lhs val
    IR.ASTContains _val -> refute (pure (UnsupportedOpExpG op)) -- mkGeomOpBe "ST_Contains" val
    IR.ASTCrosses _val -> refute (pure (UnsupportedOpExpG op)) -- mkGeomOpBe "ST_Crosses" val
    IR.ASTEquals _val -> refute (pure (UnsupportedOpExpG op)) -- mkGeomOpBe "ST_Equals" val
    IR.ASTIntersects _val -> refute (pure (UnsupportedOpExpG op)) -- mkGeomOpBe "ST_Intersects" val
    IR.ASTOverlaps _val -> refute (pure (UnsupportedOpExpG op)) -- mkGeomOpBe "ST_Overlaps" val
    IR.ASTTouches _val -> refute (pure (UnsupportedOpExpG op)) -- mkGeomOpBe "ST_Touches" val
    IR.ASTWithin _val -> refute (pure (UnsupportedOpExpG op)) -- mkGeomOpBe "ST_Within" val
    IR.ASTDWithinGeom {} -> refute (pure (UnsupportedOpExpG op)) -- applySQLFn "ST_DWithin" [lhs, val, r]
    IR.ASTDWithinGeog {} -> refute (pure (UnsupportedOpExpG op)) -- applySQLFn "ST_DWithin" [lhs, val, r, sph]
    IR.ASTIntersectsRast _val -> refute (pure (UnsupportedOpExpG op)) -- applySTIntersects [lhs, val]
    IR.ASTIntersectsNbandGeom {} -> refute (pure (UnsupportedOpExpG op)) -- applySTIntersects [lhs, nband, geommin]
    IR.ASTIntersectsGeomNband {} -> refute (pure (UnsupportedOpExpG op)) -- applySTIntersects [lhs, geommin, withSQLNull mNband]
    IR.CEQ _rhsCol -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SEQ lhs $ mkQCol rhsCol
    IR.CNE _rhsCol -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SNE lhs $ mkQCol rhsCol
    IR.CGT _rhsCol -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SGT lhs $ mkQCol rhsCol
    IR.CLT _rhsCol -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SLT lhs $ mkQCol rhsCol
    IR.CGTE _rhsCol -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SGTE lhs $ mkQCol rhsCol
    IR.CLTE _rhsCol -> refute (pure (UnsupportedOpExpG op)) -- S.BECompare S.SLTE lhs $ mkQCol rhsCol

nullableBoolEquality :: Expression -> Expression -> Expression
nullableBoolEquality x y =
  OrExpression
    [ EqualExpression x y
    , AndExpression [IsNullExpression x, IsNullExpression y]
    ]

nullableBoolInequality :: Expression -> Expression -> Expression
nullableBoolInequality x y =
  OrExpression
    [ NotEqualExpression x y
    , AndExpression [IsNotNullExpression x, IsNullExpression y]
    ]
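
-- For reference, these render to NULL-tolerant comparisons; e.g.
-- @nullableBoolEquality x y@ corresponds roughly to
--
-- > (x = y) OR (x IS NULL AND y IS NULL)
--
-- so that two NULLs compare as equal, unlike plain SQL equality.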

fromSQLExpAsInt :: Expression -> FromIr Expression
fromSQLExpAsInt = pure

fromGBoolExp :: IR.GBoolExp 'MSSQL Expression -> ReaderT EntityAlias FromIr Expression
fromGBoolExp =
  \case
    IR.BoolAnd expressions ->
      fmap AndExpression (traverse fromGBoolExp expressions)
    IR.BoolOr expressions ->
      fmap OrExpression (traverse fromGBoolExp expressions)
    IR.BoolNot expression -> fmap NotExpression (fromGBoolExp expression)
    IR.BoolExists gExists -> fmap ExistsExpression (fromGExists gExists)
    IR.BoolFld expression -> pure expression


--------------------------------------------------------------------------------
-- Delete

fromDelete :: IR.AnnDel 'MSSQL -> FromIr Delete
fromDelete (IR.AnnDel tableName (permFilter, whereClause) _ _) = do
  tableAlias <- fromTableName tableName
  runReaderT
    (do permissionsFilter <- fromAnnBoolExp permFilter
        whereExpression <- fromAnnBoolExp whereClause
        pure
          Delete
            { deleteTable =
                Aliased
                  { aliasedAlias = entityAliasText tableAlias
                  , aliasedThing = tableName
                  }
            , deleteWhere = Where [permissionsFilter, whereExpression]
            })
    tableAlias


--------------------------------------------------------------------------------
-- Misc combinators

trueExpression :: Expression
trueExpression = ValueExpression (ODBC.BoolValue True)


--------------------------------------------------------------------------------
-- Constants

jsonFieldName :: Text
jsonFieldName = "json"

aggFieldName :: Text
aggFieldName = "agg"

existsFieldName :: Text
existsFieldName = "exists_placeholder"


--------------------------------------------------------------------------------
-- Name generation

data NameTemplate
  = ArrayRelationTemplate Text
  | ArrayAggregateTemplate Text
  | ObjectRelationTemplate Text
  | TableTemplate Text
  | ForOrderAlias Text

generateEntityAlias :: NameTemplate -> FromIr Text
generateEntityAlias template = do
  FromIr (modify' (M.insertWith (+) prefix start))
  i <- FromIr get
  pure (prefix <> tshow (fromMaybe start (M.lookup prefix i)))
  where
    start = 1
    prefix = T.take 20 rendered
    rendered =
      case template of
        ArrayRelationTemplate sample -> "ar_" <> sample
        ArrayAggregateTemplate sample -> "aa_" <> sample
        ObjectRelationTemplate sample -> "or_" <> sample
        TableTemplate sample -> "t_" <> sample
        ForOrderAlias sample -> "order_" <> sample
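
-- For example (the table name is hypothetical): successive calls to
-- @generateEntityAlias (TableTemplate "artist")@ produce "t_artist1",
-- "t_artist2", and so on; the counter for each distinct prefix lives
-- in the FromIr state, and the prefix itself is capped at 20
-- characters.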

fromAlias :: From -> EntityAlias
fromAlias (FromQualifiedTable Aliased {aliasedAlias}) = EntityAlias aliasedAlias
fromAlias (FromOpenJson Aliased {aliasedAlias}) = EntityAlias aliasedAlias

columnNameToFieldName :: ColumnName -> EntityAlias -> FieldName
columnNameToFieldName (ColumnName fieldName) EntityAlias {entityAliasText = fieldNameEntity} =
  FieldName {fieldName, fieldNameEntity}
109 server/src-lib/Hasura/Backends/MSSQL/Instances/Execute.hs Normal file
@ -0,0 +1,109 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.MSSQL.Instances.Execute (NoMultiplex(..)) where

import Hasura.Prelude

import qualified Data.Environment as Env
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Database.ODBC.SQLServer as ODBC
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP

import Data.Text.Extended

import Hasura.Backends.MSSQL.Plan
import Hasura.Backends.MSSQL.ToQuery
import Hasura.Backends.MSSQL.Types
import Hasura.EncJSON
import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Backend
import Hasura.GraphQL.Execute.LiveQuery.Plan
import Hasura.GraphQL.Parser
import Hasura.RQL.Types
import Hasura.Session


instance BackendExecute 'MSSQL where
  type PreparedQuery 'MSSQL = Text
  type MultiplexedQuery 'MSSQL = NoMultiplex
  type ExecutionMonad 'MSSQL = IO
  getRemoteJoins = const []

  mkDBQueryPlan = msDBQueryPlan
  mkDBMutationPlan = msDBMutationPlan
  mkDBSubscriptionPlan = msDBSubscriptionPlan


-- multiplexed query

newtype NoMultiplex = NoMultiplex (G.Name, ODBC.Query)

instance ToTxt NoMultiplex where
  toTxt (NoMultiplex (_name, query)) = toTxt query
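
-- Note: despite the name of the type family it instantiates,
-- 'NoMultiplex' does no multiplexing: it pairs a single root-field
-- name with one rendered ODBC query, and (as the subscription planner
-- below warns) only the first root field of a subscription is kept
-- for now.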


-- query

msDBQueryPlan
  :: forall m.
     ( MonadError QErr m
     )
  => Env.Environment
  -> HTTP.Manager
  -> [HTTP.Header]
  -> UserInfo
  -> [G.Directive G.Name]
  -> SourceConfig 'MSSQL
  -> QueryDB 'MSSQL (UnpreparedValue 'MSSQL)
  -> m ExecutionStep
msDBQueryPlan _env _manager _reqHeaders userInfo _directives sourceConfig qrf = do
  select <- fromSelect <$> planNoPlan userInfo qrf
  let queryString = ODBC.renderQuery $ toQueryPretty select
      connection = _mscConnection sourceConfig
      odbcQuery = ODBC.query connection (toQueryFlat select) <&> toResultJSON
  pure $ ExecStepDB sourceConfig (Just queryString) [] odbcQuery
  where
    toResultJSON :: [Text] -> EncJSON
    toResultJSON = encJFromText . mconcat

-- mutation

msDBMutationPlan
  :: forall m.
     ( MonadError QErr m
     )
  => Env.Environment
  -> HTTP.Manager
  -> [HTTP.Header]
  -> UserInfo
  -> Bool
  -> SourceConfig 'MSSQL
  -> MutationDB 'MSSQL (UnpreparedValue 'MSSQL)
  -> m ExecutionStep
msDBMutationPlan _env _manager _reqHeaders _userInfo _stringifyNum _sourceConfig _mrf =
  throw500 "mutations are not supported in MSSQL; this should be unreachable"


-- subscription

msDBSubscriptionPlan
  :: forall m.
     ( MonadError QErr m
     )
  => UserInfo
  -> SourceConfig 'MSSQL
  -> InsOrdHashMap G.Name (QueryDB 'MSSQL (UnpreparedValue 'MSSQL))
  -> m (LiveQueryPlan 'MSSQL (MultiplexedQuery 'MSSQL))
msDBSubscriptionPlan userInfo sourceConfig rootFields = do
  -- WARNING: only keeping the first root field for now!
  query <- traverse mkQuery $ head $ OMap.toList rootFields
  let roleName = _uiRole userInfo
      parameterizedPlan = ParameterizedLiveQueryPlan roleName $ NoMultiplex query
  pure
    $ LiveQueryPlan parameterizedPlan sourceConfig
    $ mkCohortVariables mempty mempty mempty mempty
  where
    mkQuery = fmap (toQueryFlat . fromSelect) . planNoPlan userInfo
373 server/src-lib/Hasura/Backends/MSSQL/Instances/Schema.hs Normal file
@ -0,0 +1,373 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.MSSQL.Instances.Schema () where

import Hasura.Prelude

import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.List.NonEmpty as NE
import qualified Database.ODBC.SQLServer as ODBC
import qualified Language.GraphQL.Draft.Syntax as G

import Data.Text.Extended

import qualified Hasura.Backends.MSSQL.Types as MSSQL
import qualified Hasura.GraphQL.Parser as P
import qualified Hasura.GraphQL.Schema.Build as GSB
import qualified Hasura.RQL.IR.Select as IR
import qualified Hasura.RQL.IR.Update as IR

import Hasura.GraphQL.Context
import Hasura.GraphQL.Parser hiding (EnumValueInfo, field)
import Hasura.GraphQL.Parser.Internal.Parser hiding (field)
import Hasura.GraphQL.Schema.Backend
import Hasura.GraphQL.Schema.Common
import Hasura.RQL.Types


----------------------------------------------------------------
-- BackendSchema instance

instance BackendSchema 'MSSQL where
  -- top level parsers
  buildTableQueryFields = GSB.buildTableQueryFields
  buildTableRelayQueryFields = msBuildTableRelayQueryFields
  buildTableInsertMutationFields = msBuildTableInsertMutationFields
  buildTableUpdateMutationFields = msBuildTableUpdateMutationFields
  buildTableDeleteMutationFields = msBuildTableDeleteMutationFields
  buildFunctionQueryFields = msBuildFunctionQueryFields
  buildFunctionRelayQueryFields = msBuildFunctionRelayQueryFields
  buildFunctionMutationFields = msBuildFunctionMutationFields
  -- backend extensions
  relayExtension = const Nothing
  nodesAggExtension = const Nothing
  -- individual components
  columnParser = msColumnParser
  jsonPathArg = msJsonPathArg
  orderByOperators = msOrderByOperators
  comparisonExps = msComparisonExps
  updateOperators = msUpdateOperators
  offsetParser = msOffsetParser
  mkCountType = msMkCountType
  aggregateOrderByCountType = MSSQL.IntegerType
  computedField = msComputedField
  node = msNode
  tableDistinctOn = msTableDistinctOn
  remoteRelationshipField = msRemoteRelationshipField
  -- SQL literals
  columnDefaultValue = msColumnDefaultValue


----------------------------------------------------------------
-- Top level parsers

msBuildTableRelayQueryFields
  :: MonadBuildSchema 'MSSQL r m n
  => SourceName
  -> SourceConfig 'MSSQL
  -> TableName 'MSSQL
  -> TableInfo 'MSSQL
  -> G.Name
  -> NESeq (ColumnInfo 'MSSQL)
  -> SelPermInfo 'MSSQL
  -> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
msBuildTableRelayQueryFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _pkeyColumns _selPerms =
  pure Nothing

msBuildTableInsertMutationFields
  :: MonadBuildSchema 'MSSQL r m n
  => SourceName
  -> SourceConfig 'MSSQL
  -> TableName 'MSSQL
  -> TableInfo 'MSSQL
  -> G.Name
  -> InsPermInfo 'MSSQL
  -> Maybe (SelPermInfo 'MSSQL)
  -> Maybe (UpdPermInfo 'MSSQL)
  -> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildTableInsertMutationFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _insPerms _selPerms _updPerms =
  pure []

msBuildTableUpdateMutationFields
  :: MonadBuildSchema 'MSSQL r m n
  => SourceName
  -> SourceConfig 'MSSQL
  -> TableName 'MSSQL
  -> TableInfo 'MSSQL
  -> G.Name
  -> UpdPermInfo 'MSSQL
  -> Maybe (SelPermInfo 'MSSQL)
  -> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildTableUpdateMutationFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _updPerms _selPerms =
  pure []

msBuildTableDeleteMutationFields
  :: MonadBuildSchema 'MSSQL r m n
  => SourceName
  -> SourceConfig 'MSSQL
  -> TableName 'MSSQL
  -> TableInfo 'MSSQL
  -> G.Name
  -> DelPermInfo 'MSSQL
  -> Maybe (SelPermInfo 'MSSQL)
  -> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildTableDeleteMutationFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _delPerms _selPerms =
  pure []

msBuildFunctionQueryFields
  :: MonadBuildSchema 'MSSQL r m n
  => SourceName
  -> SourceConfig 'MSSQL
  -> FunctionName 'MSSQL
  -> FunctionInfo 'MSSQL
  -> TableName 'MSSQL
  -> SelPermInfo 'MSSQL
  -> m [FieldParser n (QueryRootField UnpreparedValue)]
msBuildFunctionQueryFields _ _ _ _ _ _ =
  pure []

msBuildFunctionRelayQueryFields
  :: MonadBuildSchema 'MSSQL r m n
  => SourceName
  -> SourceConfig 'MSSQL
  -> FunctionName 'MSSQL
  -> FunctionInfo 'MSSQL
  -> TableName 'MSSQL
  -> NESeq (ColumnInfo 'MSSQL)
  -> SelPermInfo 'MSSQL
  -> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
msBuildFunctionRelayQueryFields _sourceName _sourceInfo _functionName _functionInfo _tableName _pkeyColumns _selPerms =
  pure Nothing

msBuildFunctionMutationFields
  :: MonadBuildSchema 'MSSQL r m n
  => SourceName
  -> SourceConfig 'MSSQL
  -> FunctionName 'MSSQL
  -> FunctionInfo 'MSSQL
  -> TableName 'MSSQL
  -> SelPermInfo 'MSSQL
  -> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildFunctionMutationFields _ _ _ _ _ _ =
  pure []

mkMSSQLScalarTypeName :: MonadError QErr m => MSSQL.ScalarType -> m G.Name
mkMSSQLScalarTypeName = \case
  MSSQL.WcharType -> pure stringScalar
  MSSQL.WvarcharType -> pure stringScalar
  MSSQL.WtextType -> pure stringScalar
  MSSQL.FloatType -> pure floatScalar
  -- integer types
  MSSQL.IntegerType -> pure intScalar
  -- boolean type
  MSSQL.BitType -> pure boolScalar
  scalarType -> G.mkName (MSSQL.scalarTypeDBName scalarType) `onNothing` throw400 ValidationFailed
    ("cannot use SQL type " <> scalarType <<> " in the GraphQL schema because its name is not a "
    <> "valid GraphQL identifier")


----------------------------------------------------------------
-- Individual components

msColumnParser
  :: (MonadSchema n m, MonadError QErr m)
  => ColumnType 'MSSQL
  -> G.Nullability
  -> m (Parser 'Both n (Opaque (ColumnValue 'MSSQL)))
msColumnParser columnType (G.Nullability isNullable) =
  opaque . fmap (ColumnValue columnType) <$> case columnType of
    ColumnScalar scalarType -> possiblyNullable scalarType <$> case scalarType of
      -- text
      MSSQL.WcharType -> pure (ODBC.TextValue <$> P.string)
      MSSQL.WvarcharType -> pure (ODBC.TextValue <$> P.string)
      MSSQL.WtextType -> pure (ODBC.TextValue <$> P.string)
      MSSQL.FloatType -> pure (ODBC.DoubleValue <$> P.float)
      -- integer types
      MSSQL.IntegerType -> pure (ODBC.IntValue . fromIntegral <$> P.int)
      -- boolean type
      MSSQL.BitType -> pure (ODBC.BoolValue <$> P.boolean)
      _ -> do
        name <- mkMSSQLScalarTypeName scalarType
        let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
        pure $ Parser
          { pType = schemaType
          , pParser =
              valueToJSON (P.toGraphQLType schemaType) >=>
              either (parseErrorWith ParseFailed . qeError) pure . (MSSQL.parseScalarValue scalarType)
          }
    ColumnEnumReference (EnumReference tableName enumValues) ->
      case nonEmpty (Map.toList enumValues) of
        Just enumValuesList -> do
          tableGQLName <- tableGraphQLName tableName `onLeft` throwError
          let enumName = tableGQLName <> $$(G.litName "_enum")
          pure $ possiblyNullable MSSQL.VarcharType $ P.enum enumName Nothing (mkEnumValue <$> enumValuesList)
        Nothing -> throw400 ValidationFailed "empty enum values"
  where
    -- Sadly, this combinator is not sound in general, so we can’t export it
    -- for general-purpose use. If we did, someone could write this:
    --
    --   mkParameter <$> opaque do
    --     n <- int
    --     pure (mkIntColumnValue (n + 1))
    --
    -- Now we’d end up with a UVParameter that has a variable in it, so we’d
    -- parameterize over it. But when we’d reuse the plan, we wouldn’t know to
    -- increment the value by 1, so we’d use the wrong value!
    --
    -- We could theoretically solve this by retaining a reference to the parser
    -- itself and re-parsing each new value, using the saved parser, which
    -- would admittedly be neat. But it’s more complicated, and it isn’t clear
    -- that it would actually be useful, so for now we don’t support it.
    opaque :: MonadParse m => Parser 'Both m a -> Parser 'Both m (Opaque a)
    opaque parser = parser
      { pParser = \case
          P.GraphQLValue (G.VVariable var@Variable{ vInfo, vValue }) -> do
            typeCheck False (P.toGraphQLType $ pType parser) var
            P.mkOpaque (Just vInfo) <$> pParser parser (absurd <$> vValue)
          value -> P.mkOpaque Nothing <$> pParser parser value
      }
    possiblyNullable _scalarType
      | isNullable = fmap (fromMaybe ODBC.NullValue) . P.nullable
      | otherwise = id
    mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, ScalarValue 'MSSQL)
    mkEnumValue (EnumValue value, EnumValueInfo description) =
      ( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo
      , ODBC.TextValue $ G.unName value
      )

msJsonPathArg
  :: MonadParse n
  => ColumnType 'MSSQL
  -> InputFieldsParser n (Maybe (IR.ColumnOp 'MSSQL))
msJsonPathArg _columnType = pure Nothing

msOrderByOperators
  :: NonEmpty
       ( Definition P.EnumValueInfo
       , (BasicOrderType 'MSSQL, NullsOrderType 'MSSQL)
       )
msOrderByOperators = NE.fromList
  [ ( define $$(G.litName "asc") "in ascending order, nulls first"
    , (MSSQL.AscOrder, MSSQL.NullsFirst)
    )
  , ( define $$(G.litName "asc_nulls_first") "in ascending order, nulls first"
    , (MSSQL.AscOrder, MSSQL.NullsFirst)
    )
  , ( define $$(G.litName "asc_nulls_last") "in ascending order, nulls last"
    , (MSSQL.AscOrder, MSSQL.NullsLast)
    )
  , ( define $$(G.litName "desc") "in descending order, nulls last"
    , (MSSQL.DescOrder, MSSQL.NullsLast)
    )
  , ( define $$(G.litName "desc_nulls_first") "in descending order, nulls first"
    , (MSSQL.DescOrder, MSSQL.NullsFirst)
    )
  , ( define $$(G.litName "desc_nulls_last") "in descending order, nulls last"
    , (MSSQL.DescOrder, MSSQL.NullsLast)
    )
  ]
  where
    define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo

msComparisonExps
  :: forall m n
   . (BackendSchema 'MSSQL, MonadSchema n m, MonadError QErr m)
  => ColumnType 'MSSQL
  -> m (Parser 'Input n [ComparisonExp 'MSSQL])
msComparisonExps = P.memoize 'comparisonExps \columnType -> do
  -- see Note [Columns in comparison expression are never nullable]
  typedParser <- columnParser columnType (G.Nullability False)
  nullableTextParser <- columnParser (ColumnScalar MSSQL.VarcharType) (G.Nullability True)
  textParser <- columnParser (ColumnScalar MSSQL.VarcharType) (G.Nullability False)
  let name = P.getName typedParser <> $$(G.litName "_MSSQL_comparison_exp")
      desc = G.Description $ "Boolean expression to compare columns of type "
        <> P.getName typedParser
        <<> ". All fields are combined with logical 'AND'."
      textListParser = P.list textParser `P.bind` traverse P.openOpaque
      columnListParser = P.list typedParser `P.bind` traverse P.openOpaque
  pure $ P.object name (Just desc) $ catMaybes <$> sequenceA
    [ P.fieldOptional $$(G.litName "_is_null") Nothing (bool ANISNOTNULL ANISNULL <$> P.boolean)
    , P.fieldOptional $$(G.litName "_eq") Nothing (AEQ True . mkParameter <$> typedParser)
    , P.fieldOptional $$(G.litName "_neq") Nothing (ANE True . mkParameter <$> typedParser)
    , P.fieldOptional $$(G.litName "_gt") Nothing (AGT . mkParameter <$> typedParser)
    , P.fieldOptional $$(G.litName "_lt") Nothing (ALT . mkParameter <$> typedParser)
    , P.fieldOptional $$(G.litName "_gte") Nothing (AGTE . mkParameter <$> typedParser)
    , P.fieldOptional $$(G.litName "_lte") Nothing (ALTE . mkParameter <$> typedParser)
    ]

msOffsetParser :: MonadParse n => Parser 'Both n (SQLExpression 'MSSQL)
msOffsetParser = MSSQL.ValueExpression . ODBC.IntValue . fromIntegral <$> P.int

msMkCountType
  :: Maybe Bool
  -- ^ distinct values
  -> Maybe [Column 'MSSQL]
  -> CountType 'MSSQL
msMkCountType _ Nothing = MSSQL.StarCountable
msMkCountType (Just True) (Just cols) =
  maybe MSSQL.StarCountable MSSQL.DistinctCountable $ nonEmpty cols
msMkCountType _ (Just cols) =
  maybe MSSQL.StarCountable MSSQL.NonNullFieldCountable $ nonEmpty cols
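
-- For example (with placeholder columns a and b):
-- @msMkCountType (Just True) (Just [a, b])@ counts distinct (a, b)
-- pairs via 'MSSQL.DistinctCountable', while a Nothing column list
-- (or an empty one) falls back to 'MSSQL.StarCountable', i.e.
-- COUNT(*).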

-- | Argument to distinct select on columns returned from table selection
-- > distinct_on: [table_select_column!]
msTableDistinctOn
  -- :: forall m n. (BackendSchema 'MSSQL, MonadSchema n m, MonadTableInfo r m, MonadRole r m)
  :: Applicative m
  => Applicative n
  => TableName 'MSSQL
  -> SelPermInfo 'MSSQL
  -> m (InputFieldsParser n (Maybe (XDistinct 'MSSQL, NonEmpty (Column 'MSSQL))))
msTableDistinctOn _table _selectPermissions = pure (pure Nothing)

-- | Various update operators
msUpdateOperators
  -- :: forall m n r. (MonadSchema n m, MonadTableInfo r m)
  :: Applicative m
  => TableName 'MSSQL -- ^ qualified name of the table
  -> UpdPermInfo 'MSSQL -- ^ update permissions of the table
  -> m (Maybe (InputFieldsParser n [(Column 'MSSQL, IR.UpdOpExpG (UnpreparedValue 'MSSQL))]))
msUpdateOperators _table _updatePermissions = pure Nothing

-- | Computed field parser.
-- Currently unsupported: returns Nothing for now.
msComputedField
  :: MonadBuildSchema 'MSSQL r m n
  => ComputedFieldInfo 'MSSQL
  -> SelPermInfo 'MSSQL
  -> m (Maybe (FieldParser n (AnnotatedField 'MSSQL)))
msComputedField _fieldInfo _selectPermissions = pure Nothing

-- | Remote join field parser.
-- Currently unsupported: returns Nothing for now.
msRemoteRelationshipField
  :: MonadBuildSchema 'MSSQL r m n
  => RemoteFieldInfo 'MSSQL
  -> m (Maybe [FieldParser n (AnnotatedField 'MSSQL)])
msRemoteRelationshipField _remoteFieldInfo = pure Nothing

-- | The 'node' root field of a Relay request. Relay is currently unsupported on MSSQL,
-- meaning this parser will never be called: any attempt to create this parser should
-- therefore fail.
msNode
  :: MonadBuildSchema 'MSSQL r m n
  => m ( Parser 'Output n
         ( HashMap
             ( TableName 'MSSQL)
             ( SourceName, SourceConfig 'MSSQL
             , SelPermInfo 'MSSQL
             , PrimaryKeyColumns 'MSSQL
             , AnnotatedFields 'MSSQL
             )
         )
       )
msNode = throw500 "MSSQL does not support relay; `node` should never be exposed in the schema."


----------------------------------------------------------------
-- SQL literals

-- FIXME: this is nonsensical for MSSQL, we'll need to adjust the corresponding mutation
-- and its representation.
msColumnDefaultValue :: Column 'MSSQL -> SQLExpression 'MSSQL
msColumnDefaultValue = const $ MSSQL.ValueExpression ODBC.NullValue
108 server/src-lib/Hasura/Backends/MSSQL/Instances/Transport.hs Normal file
@ -0,0 +1,108 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.MSSQL.Instances.Transport () where

import Hasura.Prelude

import qualified Control.Exception as E
import qualified Data.ByteString as B
import qualified Database.ODBC.SQLServer as ODBC
import qualified Language.GraphQL.Draft.Syntax as G

import Data.Text.Encoding (encodeUtf8)
import Data.Text.Extended
import Hasura.RQL.Types.Error as HE

import qualified Hasura.Logging as L

import Hasura.Backends.MSSQL.Instances.Execute
import Hasura.Backends.MSSQL.Types
import Hasura.EncJSON
import Hasura.GraphQL.Execute.Backend
import Hasura.GraphQL.Execute.LiveQuery.Plan
import Hasura.GraphQL.Logging (MonadQueryLog (..))
import Hasura.GraphQL.Transport.Backend
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.RQL.Types
import Hasura.Server.Types (RequestId)
import Hasura.Session
import Hasura.Tracing


instance BackendTransport 'MSSQL where
  runDBQuery = runQuery
  runDBMutation = runMutation
  runDBSubscription = runSubscription

runQuery
  :: ( MonadIO m
     , MonadQueryLog m
     , MonadTrace m
     , MonadError QErr m
     )
  => RequestId
  -> GQLReqUnparsed
  -> G.Name
  -> UserInfo
  -> L.Logger L.Hasura
  -> SourceConfig 'MSSQL
  -> IO EncJSON
  -> Maybe Text
  -> m (DiffTime, EncJSON)
  -- ^ Also return the time spent in the PG query; for telemetry.
runQuery reqId query fieldName _userInfo logger _sourceConfig tx _genSql = do
  -- log the generated SQL and the graphql query
  -- FIXME: fix logging by making logQueryLog expect something backend agnostic!
  logQueryLog logger query Nothing reqId
  withElapsedTime
    $ trace ("MSSQL Query for root field " <>> fieldName)
    $ run tx

runMutation
  :: ( MonadIO m
     , MonadQueryLog m
     , MonadTrace m
     , MonadError QErr m
     )
  => RequestId
  -> GQLReqUnparsed
  -> G.Name
  -> UserInfo
  -> L.Logger L.Hasura
  -> SourceConfig 'MSSQL
  -> IO EncJSON
  -> Maybe Text
  -> m (DiffTime, EncJSON)
  -- ^ Also return 'Mutation' when the operation was a mutation, and the time
  -- spent in the PG query; for telemetry.
runMutation reqId query fieldName _userInfo logger _sourceConfig tx _genSql = do
  -- log the graphql query
  logQueryLog logger query Nothing reqId
  withElapsedTime
    $ trace ("MSSQL Mutation for root field " <>> fieldName)
    $ run tx

runSubscription
  :: ( MonadIO m
     )
  => SourceConfig 'MSSQL
  -> MultiplexedQuery 'MSSQL
  -> [(CohortId, CohortVariables)]
  -> m (DiffTime, Either QErr [(CohortId, B.ByteString)])
runSubscription sourceConfig (NoMultiplex (name, query)) variables = do
  let connection = _mscConnection sourceConfig
  withElapsedTime $ runExceptT $ for variables $ traverse $ const $
    fmap toResult $ run $ ODBC.query connection query
  where
    toResult :: [Text] -> B.ByteString
    toResult = encodeUtf8 . addFieldName . mconcat

    -- TODO: This should probably be generated from the database or should
    -- probably return encjson so that encJFromAssocList can be used
    addFieldName result =
      "{\"" <> G.unName name <> "\":" <> result <> "}"
|
||||||
|
|
||||||
|
run :: (MonadIO m, MonadError QErr m) => IO a -> m a
|
||||||
|
run action = do
|
||||||
|
result <- liftIO $ E.try @ODBC.ODBCException action
|
||||||
|
result `onLeft` (throw400 HE.MSSQLError . tshow)
|
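A minimal usage sketch (an editor's illustration, not part of this commit): `run` is the single point where ODBC exceptions become Hasura `QErr`s, so ad-hoc database actions can stay in `MonadError QErr`. The connection and literal query below are assumptions:

-- Hypothetical helper; `conn` is assumed to be an already-established ODBC.Connection.
selectGreeting :: (MonadIO m, MonadError QErr m) => ODBC.Connection -> m [Text]
selectGreeting conn = run $ ODBC.query conn "SELECT 'hello'"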
server/src-lib/Hasura/Backends/MSSQL/Instances/Types.hs (new file, 87 lines)
@@ -0,0 +1,87 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.MSSQL.Instances.Types where

import Hasura.Prelude

import qualified Database.ODBC.SQLServer as ODBC
import qualified Language.GraphQL.Draft.Syntax as G

import Data.Aeson

import qualified Hasura.Backends.MSSQL.Connection as MSSQL
import qualified Hasura.Backends.MSSQL.Types as MSSQL

import Hasura.Backends.MSSQL.ToQuery ()
import Hasura.RQL.DDL.Headers ()
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Error
import Hasura.SQL.Backend


instance Backend 'MSSQL where
  type SourceConfig 'MSSQL = MSSQL.MSSQLSourceConfig
  type SourceConnConfiguration 'MSSQL = MSSQL.MSSQLConnConfiguration
  type Identifier 'MSSQL = Void
  type Alias 'MSSQL = MSSQL.EntityAlias
  type TableName 'MSSQL = MSSQL.TableName
  type FunctionName 'MSSQL = MSSQL.FunctionName
  type FunctionArgType 'MSSQL = Void
  type ConstraintName 'MSSQL = ()
  type BasicOrderType 'MSSQL = MSSQL.Order
  type NullsOrderType 'MSSQL = MSSQL.NullsOrder
  type CountType 'MSSQL = MSSQL.Countable MSSQL.ColumnName
  type Column 'MSSQL = MSSQL.ColumnName
  type ScalarValue 'MSSQL = MSSQL.Value
  type ScalarType 'MSSQL = MSSQL.ScalarType
  type SQLExpression 'MSSQL = MSSQL.Expression
  type SQLOperator 'MSSQL = MSSQL.Op
  type XAILIKE 'MSSQL = ()
  type XANILIKE 'MSSQL = ()
  type XComputedField 'MSSQL = Void
  type XRemoteField 'MSSQL = Void
  type XEventTrigger 'MSSQL = Void
  type XRelay 'MSSQL = Void
  type XNodesAgg 'MSSQL = Void
  type XDistinct 'MSSQL = Void

  backendTag :: BackendTag 'MSSQL
  backendTag = MSSQLTag

  functionArgScalarType :: FunctionArgType 'MSSQL -> ScalarType 'MSSQL
  functionArgScalarType = absurd

  isComparableType :: ScalarType 'MSSQL -> Bool
  isComparableType = MSSQL.isComparableType

  isNumType :: ScalarType 'MSSQL -> Bool
  isNumType = MSSQL.isNumType

  textToScalarValue :: Maybe Text -> ScalarValue 'MSSQL
  textToScalarValue = maybe ODBC.NullValue ODBC.TextValue

  parseScalarValue :: ScalarType 'MSSQL -> Value -> Either QErr (ScalarValue 'MSSQL)
  parseScalarValue = MSSQL.parseScalarValue

  -- TODO: Is this Postgres specific? Should it be removed from the class?
  scalarValueToJSON :: ScalarValue 'MSSQL -> Value
  scalarValueToJSON = error "Unexpected MSSQL error: calling scalarValueToJSON. Please report this error at https://github.com/hasura/graphql-engine/issues/6590"

  functionToTable :: FunctionName 'MSSQL -> TableName 'MSSQL
  functionToTable = error "Unexpected MSSQL error: calling functionToTable. Please report this error at https://github.com/hasura/graphql-engine/issues/6590"

  tableToFunction :: TableName 'MSSQL -> FunctionName 'MSSQL
  tableToFunction = MSSQL.tableName

  tableGraphQLName :: TableName 'MSSQL -> Either QErr G.Name
  tableGraphQLName = MSSQL.getGQLTableName

  functionGraphQLName :: FunctionName 'MSSQL -> Either QErr G.Name
  functionGraphQLName = error "Unexpected MSSQL error: calling functionGraphQLName. Please report this error at https://github.com/hasura/graphql-engine/issues/6590"

  -- TODO: Is this Postgres specific? Should it be removed from the class?
  scalarTypeGraphQLName :: ScalarType 'MSSQL -> Either QErr G.Name
  scalarTypeGraphQLName = error "Unexpected MSSQL error: calling scalarTypeGraphQLName. Please report this error at https://github.com/hasura/graphql-engine/issues/6590"

  snakeCaseTableName :: TableName 'MSSQL -> Text
  snakeCaseTableName = MSSQL.snakeCaseTableName
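An editor's sketch of what these associated types buy downstream: code written against the generic `Backend` class specializes to concrete MSSQL types, e.g. `Column 'MSSQL` really is `MSSQL.ColumnName`:

-- Hypothetical helper relying on the equality Column 'MSSQL ~ MSSQL.ColumnName.
columnText :: Column 'MSSQL -> Text
columnText = MSSQL.columnNameText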
server/src-lib/Hasura/Backends/MSSQL/Meta.hs (new file, 204 lines)
@@ -0,0 +1,204 @@
{-# LANGUAGE ApplicativeDo #-}

-- |

module Hasura.Backends.MSSQL.Meta
  ( MetadataError(..)
  , loadDBMetadata
  ) where

import Hasura.Prelude

import qualified Data.HashMap.Strict as HM
import qualified Data.HashSet as HS
import qualified Data.Text.Encoding as T
import qualified Database.PG.Query as Q (sqlFromFile)

import Data.Aeson as Aeson
import Data.Aeson.Types (parseEither)
import Data.Attoparsec.ByteString
import Data.String
import Database.ODBC.SQLServer

import Hasura.Backends.MSSQL.Instances.Types ()
import Hasura.Backends.MSSQL.Types
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common (OID (..))
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend


--------------------------------------------------------------------------------
-- Loader

data MetadataError
  = UnknownScalarType Text
  deriving (Show)

loadDBMetadata :: Connection -> IO (DBTablesMetadata 'MSSQL)
loadDBMetadata conn = do
  let sql = $(Q.sqlFromFile "src-rsr/mssql_table_metadata.sql")
  sysTables <- queryJson conn (fromString sql)
  let tables = map transformTable sysTables
  pure $ HM.fromList tables


--------------------------------------------------------------------------------
-- Local types

data SysTable = SysTable
  { staName :: Text
  , staObjectId :: Int
  , staJoinedSysColumn :: [SysColumn]
  , staJoinedSysSchema :: SysSchema
  } deriving (Show, Generic)

instance FromJSON SysTable where
  parseJSON = genericParseJSON hasuraJSON


data SysSchema = SysSchema
  { ssName :: Text
  , ssSchemaId :: Int
  } deriving (Show, Generic)

instance FromJSON SysSchema where
  parseJSON = genericParseJSON hasuraJSON


data SysColumn = SysColumn
  { scName :: Text
  , scColumnId :: Int
  , scUserTypeId :: Int
  , scIsNullable :: Bool
  , scJoinedSysType :: SysType
  , scJoinedForeignKeyColumns :: [SysForeignKeyColumn]
  } deriving (Show, Generic)

instance FromJSON SysColumn where
  parseJSON = genericParseJSON hasuraJSON


data SysType = SysType
  { styName :: Text
  , stySchemaId :: Int
  , styUserTypeId :: Int
  } deriving (Show, Generic)

instance FromJSON SysType where
  parseJSON = genericParseJSON hasuraJSON


data SysForeignKeyColumn = SysForeignKeyColumn
  { sfkcConstraintObjectId :: Int
  , sfkcConstraintColumnId :: Int
  , sfkcParentObjectId :: Int
  , sfkcParentColumnId :: Int
  , sfkcReferencedObjectId :: Int
  , sfkcReferencedColumnId :: Int
  , sfkcJoinedReferencedTableName :: Text
  , sfkcJoinedReferencedColumnName :: Text
  , sfkcJoinedReferencedSysSchema :: SysSchema
  } deriving (Show, Generic)

instance FromJSON SysForeignKeyColumn where
  parseJSON = genericParseJSON hasuraJSON


--------------------------------------------------------------------------------
-- Transform

transformTable :: SysTable -> (TableName, DBTableMetadata 'MSSQL)
transformTable tableInfo =
  let schemaName = ssName $ staJoinedSysSchema tableInfo
      tableName = TableName (staName tableInfo) schemaName
      tableOID = OID $ staObjectId tableInfo
      (columns, foreignKeys) = unzip $ fmap transformColumn $ staJoinedSysColumn tableInfo
  in ( tableName
     , DBTableMetadata
         tableOID
         columns
         Nothing  -- no primary key information?
         HS.empty -- no unique constraints?
         (HS.fromList $ map ForeignKeyMetadata $ HM.elems $ coalesceKeys $ concat foreignKeys)
         Nothing  -- no views, only tables
         Nothing  -- no description
     )

transformColumn
  :: SysColumn
  -> (RawColumnInfo 'MSSQL, [ForeignKey 'MSSQL])
transformColumn columnInfo =
  let prciName = ColumnName $ scName columnInfo
      -- the IR uses this to uniquely identify columns, as Postgres will
      -- keep a unique position for a column even when columns are added
      -- or dropped. We assume here that this arbitrary column id can
      -- serve the same purpose.
      prciPosition = scColumnId columnInfo
      prciIsNullable = scIsNullable columnInfo
      prciDescription = Nothing
      prciType = parseScalarType $ styName $ scJoinedSysType columnInfo
      foreignKeys = scJoinedForeignKeyColumns columnInfo <&> \foreignKeyColumn ->
        -- there's currently no ConstraintName type in our MSSQL code?
        let _fkConstraint = Constraint () {- FIXME -} $ OID $ sfkcConstraintObjectId foreignKeyColumn
            schemaName = ssName $ sfkcJoinedReferencedSysSchema foreignKeyColumn
            _fkForeignTable = TableName (sfkcJoinedReferencedTableName foreignKeyColumn) schemaName
            _fkColumnMapping = HM.singleton prciName $ ColumnName $ sfkcJoinedReferencedColumnName foreignKeyColumn
        in ForeignKey {..}
  in (RawColumnInfo{..}, foreignKeys)


--------------------------------------------------------------------------------
-- Helpers

coalesceKeys :: [ForeignKey 'MSSQL] -> HM.HashMap TableName (ForeignKey 'MSSQL)
coalesceKeys = foldl' coalesce HM.empty
  where coalesce mapping fk@(ForeignKey _ tableName _) = HM.insertWith combine tableName fk mapping
        -- is it ok to assume we can coalesce only on table name?
        combine oldFK newFK = oldFK { _fkColumnMapping = (HM.union `on` _fkColumnMapping) oldFK newFK }

parseScalarType :: Text -> ScalarType
parseScalarType = \case
  "char"             -> CharType
  "numeric"          -> NumericType
  "decimal"          -> DecimalType
  "money"            -> DecimalType
  "smallmoney"       -> DecimalType
  "int"              -> IntegerType
  "smallint"         -> SmallintType
  "float"            -> FloatType
  "real"             -> RealType
  "date"             -> DateType
  "time"             -> Ss_time2Type
  "varchar"          -> VarcharType
  "nchar"            -> WcharType
  "nvarchar"         -> WvarcharType
  "ntext"            -> WtextType
  "timestamp"        -> TimestampType
  "text"             -> TextType
  "binary"           -> BinaryType
  "bigint"           -> BigintType
  "tinyint"          -> TinyintType
  "varbinary"        -> VarbinaryType
  "bit"              -> BitType
  "uniqueidentifier" -> GuidType
  t                  -> UnknownType t


--------------------------------------------------------------------------------
-- Quick catalog queries

queryJson :: FromJSON a => Connection -> Query -> IO [a]
queryJson conn query' = do
  (steps, iresult) <-
    stream
      conn
      query'
      (\(!steps, parser) input ->
         pure (Continue (steps + 1, feed parser (T.encodeUtf8 input))))
      (0 :: Int, parse json mempty)
  case steps of
    0 -> pure []
    _ ->
      case iresult of
        Done _ jvalue ->
          parseEither parseJSON jvalue `onLeft` error -- FIXME
        Partial {} -> error "Incomplete output from SQL Server."
        Fail _ _ctx err -> error ("JSON parser error: " <> err)
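A hedged usage sketch (editor's illustration; the connection and query below are assumptions, not part of the commit): `queryJson` expects the server to stream back a single JSON document, which the metadata SQL produces, and decodes it with aeson:

-- Hypothetical call; a query ending in FOR JSON PATH yields one JSON array.
-- someTables :: IO [SysTable]
-- someTables = queryJson conn "SELECT name FROM sys.tables FOR JSON PATH"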
server/src-lib/Hasura/Backends/MSSQL/Plan.hs (new file, 271 lines)
@@ -0,0 +1,271 @@
-- | Planning T-SQL queries and subscriptions.

module Hasura.Backends.MSSQL.Plan where
-- TODO: Re-add the export list after cleaning up the module
-- ( planNoPlan
-- , planNoPlanMap
-- , planMultiplex
-- ) where

import Hasura.Prelude hiding (first)

import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as HM
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.HashSet as Set
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import qualified Database.ODBC.SQLServer as ODBC
import qualified Language.GraphQL.Draft.Syntax as G

import Control.Monad.Validate
import Data.ByteString.Lazy (toStrict)
import Data.Text.Extended

import qualified Hasura.GraphQL.Parser as GraphQL
import qualified Hasura.RQL.Types.Column as RQL

import Hasura.Backends.MSSQL.FromIr as TSQL
import Hasura.Backends.MSSQL.Types as TSQL
import Hasura.GraphQL.Context
import Hasura.RQL.Types.Error
import Hasura.SQL.Backend
import Hasura.Session


newtype QDB v b = QDB (QueryDB b v)

type SubscriptionRootFieldMSSQL v = RootField (QDB v) Void Void {-(RQL.AnnActionAsyncQuery 'MSSQL v)-} Void


--------------------------------------------------------------------------------
-- Top-level planner

planNoPlan
  :: MonadError QErr m
  => UserInfo
  -> QueryDB 'MSSQL (GraphQL.UnpreparedValue 'MSSQL)
  -> m Select
planNoPlan userInfo queryDB = do
  rootField <- traverseQueryDB (prepareValueNoPlan (_uiSession userInfo)) queryDB
  select <-
    runValidate (TSQL.runFromIr (TSQL.fromRootField rootField))
    `onLeft` (throw400 NotSupported . tshow)
  pure
    select
      { selectFor =
          case selectFor select of
            NoFor -> NoFor
            JsonFor forJson -> JsonFor forJson {jsonRoot = Root "root"}
      }

-- planMultiplex ::
--      OMap.InsOrdHashMap G.Name (SubscriptionRootFieldMSSQL (GraphQL.UnpreparedValue 'MSSQL))
--   -> Either PrepareError Select
-- planMultiplex _unpreparedMap =
--   let rootFieldMap =
--         evalState
--           (traverse
--              (traverseQueryRootField prepareValueMultiplex)
--              unpreparedMap)
--           emptyPrepareState
--   selectMap <-
--     first
--       FromIrError
--       (runValidate (TSQL.runFromIr (traverse TSQL.fromRootField rootFieldMap)))
--   pure (multiplexRootReselect (collapseMap selectMap))

-- Plan a query without prepare/exec.
-- planNoPlanMap ::
--      OMap.InsOrdHashMap G.Name (SubscriptionRootFieldMSSQL (GraphQL.UnpreparedValue 'MSSQL))
--   -> Either PrepareError Reselect
-- planNoPlanMap _unpreparedMap =
--   let rootFieldMap = runIdentity $
--         traverse (traverseQueryRootField (pure . prepareValueNoPlan)) unpreparedMap
--   selectMap <-
--     first
--       FromIrError
--       (runValidate (TSQL.runFromIr (traverse TSQL.fromRootField rootFieldMap)))
--   pure (collapseMap selectMap)


--------------------------------------------------------------------------------
-- Converting a root field into a T-SQL select statement

-- | Collapse a set of selects into a single select that projects
-- these as subselects.
collapseMap :: OMap.InsOrdHashMap G.Name Select
            -> Reselect
collapseMap selects =
  Reselect
    { reselectFor =
        JsonFor ForJson {jsonCardinality = JsonSingleton, jsonRoot = NoRoot}
    , reselectWhere = Where mempty
    , reselectProjections =
        map projectSelect (OMap.toList selects)
    }
  where
    projectSelect :: (G.Name, Select) -> Projection
    projectSelect (name, select) =
      ExpressionProjection
        (Aliased
           { aliasedThing = SelectExpression select
           , aliasedAlias = G.unName name
           })


--------------------------------------------------------------------------------
-- Session variables

globalSessionExpression :: TSQL.Expression
globalSessionExpression =
  ValueExpression (ODBC.TextValue "current_setting('hasura.user')::json")

-- TODO: real env object.
envObjectExpression :: TSQL.Expression
envObjectExpression =
  ValueExpression (ODBC.TextValue "[{\"result_id\":1,\"result_vars\":{\"synthetic\":[10]}}]")


--------------------------------------------------------------------------------
-- Resolving values

data PrepareError
  = FromIrError (NonEmpty TSQL.Error)

data PrepareState = PrepareState
  { positionalArguments :: !Integer
  , namedArguments :: !(HashMap G.Name (RQL.ColumnValue 'MSSQL))
  , sessionVariables :: !(Set.HashSet SessionVariable)
  }

emptyPrepareState :: PrepareState
emptyPrepareState =
  PrepareState {positionalArguments = 0, namedArguments = mempty, sessionVariables = mempty}

-- | Prepare a value without any query planning; we just execute the
-- query with the values embedded.
prepareValueNoPlan
  :: MonadError QErr m
  => SessionVariables
  -> GraphQL.UnpreparedValue 'MSSQL
  -> m TSQL.Expression
prepareValueNoPlan sessionVariables =
  \case
    GraphQL.UVLiteral x -> pure x
    GraphQL.UVSession -> pure $ ValueExpression $ ODBC.ByteStringValue $ toStrict $ J.encode sessionVariables
    GraphQL.UVParameter _ RQL.ColumnValue{..} -> pure $ ValueExpression cvValue
    GraphQL.UVSessionVar _typ sessionVariable -> do
      value <- getSessionVariableValue sessionVariable sessionVariables
        `onNothing` throw400 NotFound ("missing session variable: " <>> sessionVariable)
      pure $ ValueExpression $ ODBC.TextValue value

-- | Prepare a value for multiplexed queries.
prepareValueMultiplex ::
     GraphQL.UnpreparedValue 'MSSQL
  -> State PrepareState TSQL.Expression
prepareValueMultiplex =
  \case
    GraphQL.UVLiteral x -> pure x
    GraphQL.UVSession ->
      pure (JsonQueryExpression globalSessionExpression)
    GraphQL.UVSessionVar _typ text -> do
      modify' (\s -> s {sessionVariables = text `Set.insert` sessionVariables s})
      pure $ JsonValueExpression globalSessionExpression (FieldPath RootPath (toTxt text))
    GraphQL.UVParameter mVariableInfo pgcolumnvalue ->
      case fmap GraphQL.getName mVariableInfo of
        Nothing -> do
          index <- gets positionalArguments
          modify' (\s -> s {positionalArguments = index + 1})
          pure
            (JsonValueExpression
               (ColumnExpression
                  FieldName
                    { fieldNameEntity = rowAlias
                    , fieldName = resultVarsAlias
                    })
               (RootPath `FieldPath` "synthetic" `IndexPath` index))
        Just name -> do
          modify
            (\s ->
               s
                 { namedArguments =
                     HM.insert name pgcolumnvalue (namedArguments s)
                 })
          pure
            (JsonValueExpression
               envObjectExpression
               (RootPath `FieldPath` "query" `FieldPath` G.unName name))


--------------------------------------------------------------------------------
-- Producing the correct SQL-level list comprehension to multiplex a query

-- Problem description:
--
-- Generate a query that repeats the same query N times but with
-- certain slots replaced:
--
-- [ Select x y | (x,y) <- [..] ]
--

multiplexRootReselect :: TSQL.Reselect -> TSQL.Select
multiplexRootReselect rootReselect =
  Select
    { selectTop = NoTop
    , selectProjections =
        [ FieldNameProjection
            Aliased
              { aliasedThing =
                  FieldName
                    {fieldNameEntity = rowAlias, fieldName = resultIdAlias}
              , aliasedAlias = resultIdAlias
              }
        , ExpressionProjection
            Aliased
              { aliasedThing =
                  JsonQueryExpression
                    (ColumnExpression
                       (FieldName
                          { fieldNameEntity = resultAlias
                          , fieldName = TSQL.jsonFieldName
                          }))
              , aliasedAlias = resultAlias
              }
        ]
    , selectFrom =
        FromOpenJson
          Aliased
            { aliasedThing =
                OpenJson
                  { openJsonExpression = envObjectExpression
                  , openJsonWith =
                      NE.fromList
                        [IntField resultIdAlias, JsonField resultVarsAlias]
                  }
            , aliasedAlias = rowAlias
            }
    , selectJoins =
        [ Join
            { joinSource = JoinReselect rootReselect
            , joinJoinAlias =
                JoinAlias
                  { joinAliasEntity = resultAlias
                  , joinAliasField = Just TSQL.jsonFieldName
                  }
            }
        ]
    , selectWhere = Where mempty
    , selectFor =
        JsonFor ForJson {jsonCardinality = JsonArray, jsonRoot = NoRoot}
    , selectOrderBy = Nothing
    , selectOffset = Nothing
    }

resultIdAlias :: T.Text
resultIdAlias = "result_id"

resultVarsAlias :: T.Text
resultVarsAlias = "result_vars"

resultAlias :: T.Text
resultAlias = "result"

rowAlias :: T.Text
rowAlias = "row"
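For orientation, an editor's sketch of the T-SQL shape that `multiplexRootReselect` builds (simplified, not verbatim printer output):

-- SELECT [row].[result_id] AS [result_id],
--        JSON_QUERY([result].[json]) AS [result]
-- FROM OPENJSON(<variables array>)
--      WITH ([result_id] INT, [result_vars] NVARCHAR(MAX) AS JSON) AS [row]
-- OUTER APPLY (<root reselect>) AS [result]([json])
-- FOR JSON PATH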
server/src-lib/Hasura/Backends/MSSQL/Result.hs (new file, 22 lines)
@@ -0,0 +1,22 @@
module Hasura.Backends.MSSQL.Result where

import Hasura.Prelude

import qualified Data.Aeson as J
import qualified Database.ODBC.Internal as ODBC


odbcValueToJValue :: ODBC.Value -> J.Value
odbcValueToJValue = \case
  ODBC.TextValue t       -> J.String t
  ODBC.ByteStringValue b -> J.String $ bsToTxt b
  ODBC.BinaryValue b     -> J.String $ bsToTxt $ ODBC.unBinary b
  ODBC.BoolValue b       -> J.Bool b
  ODBC.DoubleValue d     -> J.toJSON d
  ODBC.FloatValue f      -> J.toJSON f
  ODBC.IntValue i        -> J.toJSON i
  ODBC.ByteValue b       -> J.toJSON b
  ODBC.DayValue d        -> J.toJSON d
  ODBC.TimeOfDayValue td -> J.toJSON td
  ODBC.LocalTimeValue l  -> J.toJSON l
  ODBC.NullValue         -> J.Null
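A small usage sketch (editor's illustration, not part of the commit): converting a whole row of ODBC values into a JSON array:

-- Hypothetical helper built on odbcValueToJValue.
rowToJson :: [ODBC.Value] -> J.Value
rowToJson = J.toJSON . map odbcValueToJValue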
server/src-lib/Hasura/Backends/MSSQL/ToQuery.hs (new file, 372 lines)
@@ -0,0 +1,372 @@
{-# LANGUAGE OverloadedStrings #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

-- | Convert the simple T-SQL AST to an SQL query, ready to be passed
-- to the odbc package's query/exec functions.

module Hasura.Backends.MSSQL.ToQuery
  ( fromSelect
  , fromReselect
  , toQueryFlat
  , toQueryPretty
  , fromDelete
  , Printer(..)
  ) where

import Hasura.Prelude

import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Builder as LT

import Data.List (intersperse)
import Data.String
import Database.ODBC.SQLServer

import Hasura.Backends.MSSQL.Types
import Hasura.SQL.Types (ToSQL (..))


--------------------------------------------------------------------------------
-- Types

data Printer
  = SeqPrinter [Printer]
  | SepByPrinter Printer [Printer]
  | NewlinePrinter
  | QueryPrinter Query
  | IndentPrinter Int Printer
  deriving (Show, Eq)

instance IsString Printer where
  fromString = QueryPrinter . fromString

(<+>) :: Printer -> Printer -> Printer
(<+>) x y = SeqPrinter [x, y]

(<+>?) :: Printer -> Maybe Printer -> Printer
(<+>?) x Nothing = x
(<+>?) x (Just y) = SeqPrinter [x, y]


--------------------------------------------------------------------------------
-- Instances

instance ToSQL Expression where
  toSQL = fromString . show . toQueryFlat . fromExpression


--------------------------------------------------------------------------------
-- Printer generators

fromExpression :: Expression -> Printer
fromExpression =
  \case
    JsonQueryExpression e -> "JSON_QUERY(" <+> fromExpression e <+> ")"
    JsonValueExpression e path ->
      "JSON_VALUE(" <+> fromExpression e <+> fromPath path <+> ")"
    ValueExpression value -> QueryPrinter (toSql value)
    AndExpression xs ->
      SepByPrinter
        (NewlinePrinter <+> "AND ")
        (toList
           (fmap
              (\x -> "(" <+> fromExpression x <+> ")")
              (fromMaybe (pure trueExpression) (nonEmpty xs))))
    OrExpression xs ->
      SepByPrinter
        (NewlinePrinter <+> " OR ")
        (toList
           (fmap
              (\x -> "(" <+> fromExpression x <+> ")")
              (fromMaybe (pure falseExpression) (nonEmpty xs))))
    NotExpression expression -> "NOT " <+> fromExpression expression
    ExistsExpression select -> "EXISTS (" <+> fromSelect select <+> ")"
    IsNullExpression expression ->
      "(" <+> fromExpression expression <+> ") IS NULL"
    IsNotNullExpression expression ->
      "(" <+> fromExpression expression <+> ") IS NOT NULL"
    ColumnExpression fieldName -> fromFieldName fieldName
    EqualExpression x y ->
      "(" <+> fromExpression x <+> ") = (" <+> fromExpression y <+> ")"
    NotEqualExpression x y ->
      "(" <+> fromExpression x <+> ") != (" <+> fromExpression y <+> ")"
    ToStringExpression e -> "CONCAT(" <+> fromExpression e <+> ", '')"
    SelectExpression s -> "(" <+> IndentPrinter 1 (fromSelect s) <+> ")"
    OpExpression op x y ->
      "(" <+>
      fromExpression x <+>
      ") " <+> fromOp op <+> " (" <+> fromExpression y <+> ")"

fromOp :: Op -> Printer
fromOp =
  \case
    LessOp -> "<"
    MoreOp -> ">"
    MoreOrEqualOp -> ">="
    LessOrEqualOp -> "<="

fromPath :: JsonPath -> Printer
fromPath path =
  ", " <+> string path
  where
    string = fromExpression .
             ValueExpression . TextValue . LT.toStrict . LT.toLazyText . go
    go =
      \case
        RootPath -> "$"
        IndexPath r i -> go r <> "[" <> LT.fromString (show i) <> "]"
        FieldPath r f -> go r <> ".\"" <> LT.fromText f <> "\""

fromFieldName :: FieldName -> Printer
fromFieldName (FieldName {..}) =
  fromNameText fieldNameEntity <+> "." <+> fromNameText fieldName

fromDelete :: Delete -> Printer
fromDelete Delete {deleteTable, deleteWhere} =
  SepByPrinter
    NewlinePrinter
    [ "DELETE " <+> fromNameText (aliasedAlias deleteTable)
    , "FROM " <+> fromAliased (fmap fromTableName deleteTable)
    , fromWhere deleteWhere
    ]

fromSelect :: Select -> Printer
fromSelect Select {..} =
  SepByPrinter
    NewlinePrinter
    [ "SELECT " <+>
      IndentPrinter
        7
        (SepByPrinter
           ("," <+> NewlinePrinter)
           (map fromProjection (toList selectProjections)))
    , "FROM " <+> IndentPrinter 5 (fromFrom selectFrom)
    , SepByPrinter
        NewlinePrinter
        (map
           (\Join {..} ->
              SeqPrinter
                [ "OUTER APPLY ("
                , IndentPrinter 13 (fromJoinSource joinSource)
                , ") "
                , NewlinePrinter
                , "AS "
                , fromJoinAlias joinJoinAlias
                ])
           selectJoins)
    , fromWhere selectWhere
    , fromOrderBys selectTop selectOffset selectOrderBy
    , fromFor selectFor
    ]

fromJoinSource :: JoinSource -> Printer
fromJoinSource =
  \case
    JoinSelect select -> fromSelect select
    JoinReselect reselect -> fromReselect reselect

fromReselect :: Reselect -> Printer
fromReselect Reselect {..} =
  SepByPrinter
    NewlinePrinter
    [ "SELECT " <+>
      IndentPrinter
        7
        (SepByPrinter
           ("," <+> NewlinePrinter)
           (map fromProjection (toList reselectProjections)))
    , fromFor reselectFor
    , fromWhere reselectWhere
    ]

fromOrderBys ::
     Top -> Maybe Expression -> Maybe (NonEmpty OrderBy) -> Printer
fromOrderBys NoTop Nothing Nothing = "" -- An ORDER BY is wasteful if not needed.
fromOrderBys top moffset morderBys =
  SeqPrinter
    [ "ORDER BY "
    , IndentPrinter
        9
        (SepByPrinter
           NewlinePrinter
           [ case morderBys of
               Nothing -> "1"
               Just orderBys ->
                 SepByPrinter
                   ("," <+> NewlinePrinter)
                   (concatMap fromOrderBy (toList orderBys))
           , case (top, moffset) of
               (NoTop, Nothing) -> ""
               (NoTop, Just offset) ->
                 "OFFSET " <+> fromExpression offset <+> " ROWS"
               (Top n, Nothing) ->
                 "OFFSET 0 ROWS FETCH NEXT " <+>
                 QueryPrinter (toSql n) <+> " ROWS ONLY"
               (Top n, Just offset) ->
                 "OFFSET " <+>
                 fromExpression offset <+>
                 " ROWS FETCH NEXT " <+> QueryPrinter (toSql n) <+> " ROWS ONLY"
           ])
    ]

fromOrderBy :: OrderBy -> [Printer]
fromOrderBy OrderBy {..} =
  [ fromNullsOrder orderByFieldName orderByNullsOrder
  , fromFieldName orderByFieldName <+> " " <+> fromOrder orderByOrder
  ]

fromOrder :: Order -> Printer
fromOrder =
  \case
    AscOrder -> "ASC"
    DescOrder -> "DESC"

fromNullsOrder :: FieldName -> NullsOrder -> Printer
fromNullsOrder fieldName =
  \case
    NullsAnyOrder -> ""
    NullsFirst -> "IIF(" <+> fromFieldName fieldName <+> " IS NULL, 0, 1)"
    NullsLast -> "IIF(" <+> fromFieldName fieldName <+> " IS NULL, 1, 0)"

fromJoinAlias :: JoinAlias -> Printer
fromJoinAlias JoinAlias {..} =
  fromNameText joinAliasEntity <+>?
  fmap (\name -> "(" <+> fromNameText name <+> ")") joinAliasField

fromFor :: For -> Printer
fromFor =
  \case
    NoFor -> ""
    JsonFor ForJson {jsonCardinality, jsonRoot = root} ->
      "FOR JSON PATH" <+>
      case jsonCardinality of
        JsonArray -> ""
        JsonSingleton ->
          ", WITHOUT_ARRAY_WRAPPER" <+>
          case root of
            NoRoot -> ""
            Root text -> "ROOT(" <+> QueryPrinter (toSql text) <+> ")"

fromProjection :: Projection -> Printer
fromProjection =
  \case
    ExpressionProjection aliasedExpression ->
      fromAliased (fmap fromExpression aliasedExpression)
    FieldNameProjection aliasedFieldName ->
      fromAliased (fmap fromFieldName aliasedFieldName)
    AggregateProjection aliasedAggregate ->
      fromAliased (fmap fromAggregate aliasedAggregate)
    StarProjection -> "*"

fromAggregate :: Aggregate -> Printer
fromAggregate =
  \case
    CountAggregate countable -> "COUNT(" <+> fromCountable countable <+> ")"
    OpAggregate text args ->
      QueryPrinter (rawUnescapedText text) <+>
      "(" <+> SepByPrinter ", " (map fromExpression (toList args)) <+> ")"
    TextAggregate text -> fromExpression (ValueExpression (TextValue text))

fromCountable :: Countable FieldName -> Printer
fromCountable =
  \case
    StarCountable -> "*"
    NonNullFieldCountable fields ->
      SepByPrinter ", " (map fromFieldName (toList fields))
    DistinctCountable fields ->
      "DISTINCT " <+>
      SepByPrinter ", " (map fromFieldName (toList fields))

fromWhere :: Where -> Printer
fromWhere =
  \case
    Where expressions ->
      case filter ((/= trueExpression) . collapse) expressions of
        [] -> ""
        collapsedExpressions ->
          "WHERE " <+>
          IndentPrinter 6 (fromExpression (AndExpression collapsedExpressions))
      where collapse (AndExpression [x]) = collapse x
            collapse (AndExpression []) = trueExpression
            collapse (OrExpression [x]) = collapse x
            collapse x = x

fromFrom :: From -> Printer
fromFrom =
  \case
    FromQualifiedTable aliasedQualifiedTableName ->
      fromAliased (fmap fromTableName aliasedQualifiedTableName)
    FromOpenJson openJson -> fromAliased (fmap fromOpenJson openJson)

fromOpenJson :: OpenJson -> Printer
fromOpenJson OpenJson {openJsonExpression, openJsonWith} =
  SepByPrinter
    NewlinePrinter
    [ "OPENJSON(" <+>
      IndentPrinter 9 (fromExpression openJsonExpression) <+> ")"
    , "WITH (" <+>
      IndentPrinter
        5
        (SepByPrinter
           ("," <+> NewlinePrinter)
           (toList (fmap fromJsonFieldSpec openJsonWith))) <+>
      ")"
    ]

fromJsonFieldSpec :: JsonFieldSpec -> Printer
fromJsonFieldSpec =
  \case
    IntField name -> fromNameText name <+> " INT"
    JsonField name -> fromNameText name <+> " NVARCHAR(MAX) AS JSON"

fromTableName :: TableName -> Printer
fromTableName TableName {tableName, tableSchema} =
  fromNameText tableSchema <+> "." <+> fromNameText tableName

fromAliased :: Aliased Printer -> Printer
fromAliased Aliased {..} =
  aliasedThing <+>
  ((" AS " <+>) . fromNameText) aliasedAlias

fromNameText :: Text -> Printer
fromNameText t = QueryPrinter (rawUnescapedText ("[" <> t <> "]"))

trueExpression :: Expression
trueExpression = ValueExpression (BoolValue True)

falseExpression :: Expression
falseExpression = ValueExpression (BoolValue False)


--------------------------------------------------------------------------------
-- Basic printing API

toQueryFlat :: Printer -> Query
toQueryFlat = go 0
  where
    go level =
      \case
        QueryPrinter q -> q
        SeqPrinter xs -> mconcat (filter notEmpty (map (go level) xs))
        SepByPrinter x xs ->
          mconcat
            (intersperse (go level x) (filter notEmpty (map (go level) xs)))
        NewlinePrinter -> " "
        IndentPrinter n p -> go (level + n) p
    notEmpty = (/= mempty) . renderQuery

toQueryPretty :: Printer -> Query
toQueryPretty = go 0
  where
    go level =
      \case
        QueryPrinter q -> q
        SeqPrinter xs -> mconcat (filter notEmpty (map (go level) xs))
        SepByPrinter x xs ->
          mconcat
            (intersperse (go level x) (filter notEmpty (map (go level) xs)))
        NewlinePrinter -> "\n" <> indentation level
        IndentPrinter n p -> go (level + n) p
    indentation n = rawUnescapedText (T.replicate n " ")
    notEmpty = (/= mempty) . renderQuery
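An editor's sketch of the two renderers side by side (not part of the commit): `toQueryFlat` collapses every `NewlinePrinter` to a single space, while `toQueryPretty` emits a newline plus the current indentation level:

-- >>> renderQuery (toQueryFlat ("SELECT 1" <+> NewlinePrinter <+> "FROM [t]"))
-- "SELECT 1 FROM [t]"
-- >>> renderQuery (toQueryPretty (IndentPrinter 2 ("SELECT 1" <+> NewlinePrinter <+> "FROM [t]")))
-- "SELECT 1\n  FROM [t]"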
server/src-lib/Hasura/Backends/MSSQL/Types.hs (new file, 46 lines)
@@ -0,0 +1,46 @@
-- | Types for Transact-SQL aka T-SQL; the language of SQL Server.

module Hasura.Backends.MSSQL.Types
  ( MSSQLSourceConfig(..)
  , MSSQLRunSQL(..)
  , module Hasura.Backends.MSSQL.Types.Internal
  ) where

import Hasura.Prelude

import Data.Aeson
import Data.Aeson.TH

import qualified Database.ODBC.SQLServer as ODBC

import Hasura.Backends.MSSQL.Connection
import Hasura.Backends.MSSQL.Types.Instances ()
import Hasura.Backends.MSSQL.Types.Internal
import Hasura.Incremental (Cacheable (..))
import Hasura.RQL.Types.Common

data MSSQLSourceConfig
  = MSSQLSourceConfig
  { _mscConnectionString :: !MSSQLConnectionString
  , _mscConnection :: !ODBC.Connection
  } deriving (Generic)

instance Show MSSQLSourceConfig where
  show = show . _mscConnectionString

instance Eq MSSQLSourceConfig where
  MSSQLSourceConfig connStr1 _ == MSSQLSourceConfig connStr2 _ =
    connStr1 == connStr2

instance Cacheable MSSQLSourceConfig where
  unchanged _ = (==)

instance ToJSON MSSQLSourceConfig where
  toJSON = toJSON . _mscConnectionString

data MSSQLRunSQL
  = MSSQLRunSQL
  { _mrsSql :: Text
  , _mrsSource :: !SourceName
  } deriving (Show, Eq)
$(deriveJSON hasuraJSON ''MSSQLRunSQL)
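For reference, an editor's sketch of the JSON shape `deriveJSON hasuraJSON` should give `MSSQLRunSQL`, assuming Hasura's usual prefix-dropping snake_case options (the values are made up):

-- { "sql": "SELECT 1", "source": "my_mssql_source" }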
server/src-lib/Hasura/Backends/MSSQL/Types/Instances.hs (new file, 182 lines)
@@ -0,0 +1,182 @@
{-# LANGUAGE TemplateHaskell #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}

-- | Instances that are slow to compile.

module Hasura.Backends.MSSQL.Types.Instances where

import Hasura.Prelude

import qualified Database.ODBC.SQLServer as ODBC

import Data.Aeson
import Data.Aeson.Types
import Data.Text.Extended (ToTxt (..))
import Hasura.Backends.MSSQL.Types.Internal
import Hasura.Incremental.Internal.Dependency
import Language.Haskell.TH
import Language.Haskell.TH.Syntax


$(fmap concat $ for [''Aliased]
  \name -> [d|
  deriving instance Generic ($(conT name) a)
  instance Hashable a => Hashable ($(conT name) a)
  instance Cacheable a => Cacheable ($(conT name) a)
  deriving instance Eq a => Eq ($(conT name) a)
  instance NFData a => NFData ($(conT name) a)
  deriving instance Show a => Show ($(conT name) a)
  deriving instance Functor $(conT name)
  deriving instance Data a => Data ($(conT name) a)
  |])

$(fmap concat $ for [ ''UnifiedTableName
                    , ''UnifiedObjectRelationship
                    , ''UnifiedArrayRelationship
                    , ''UnifiedUsing
                    , ''UnifiedOn
                    , ''UnifiedColumn
                    ]
  \name -> [d|
  deriving instance Generic $(conT name)
  instance Hashable $(conT name)
  instance Cacheable $(conT name)
  deriving instance Eq $(conT name)
  deriving instance Show $(conT name)
  deriving instance Data $(conT name)
  instance FromJSON $(conT name)
  deriving instance Ord $(conT name)
  |])

$(fmap concat $ for [ ''Where
                    , ''For
                    , ''Aggregate
                    , ''EntityAlias
                    , ''ForJson
                    , ''JsonCardinality
                    , ''Root
                    , ''OrderBy
                    , ''JoinAlias
                    , ''Reselect
                    , ''ColumnName
                    , ''Expression
                    , ''NullsOrder
                    , ''Order
                    , ''ScalarType
                    , ''TableName
                    , ''Select
                    , ''Top
                    , ''FieldName
                    , ''JsonPath
                    , ''Op
                    , ''Projection
                    , ''From
                    , ''OpenJson
                    , ''JsonFieldSpec
                    , ''Join
                    , ''JoinSource
                    ]
  \name -> [d|
  deriving instance Generic $(conT name)
  instance Hashable $(conT name)
  instance Cacheable $(conT name)
  deriving instance Eq $(conT name)
  deriving instance Show $(conT name)
  deriving instance Data $(conT name)
  instance NFData $(conT name)
  |])


$(fmap concat $ for [''TableName, ''ScalarType]
  \name -> [d|deriving instance Ord $(conT name) |])

$(fmap concat $ for [''TableName, ''NullsOrder, ''Order]
  \name -> [d|deriving instance Lift $(conT name) |])


--------------------------------------------------------------------------------
-- Third-party types

instance Cacheable ODBC.Value
instance Cacheable ODBC.Binary


--------------------------------------------------------------------------------
-- Debug instances

instance ToTxt ScalarType where
  toTxt = tshow -- TODO: include schema

instance ToTxt TableName where
  toTxt = tshow -- TODO: include schema

instance ToTxt ColumnName where
  toTxt = columnNameText

$(fmap concat $ for [''Order, ''NullsOrder, ''ScalarType, ''FieldName]
  \name -> [d|
  instance ToJSON $(conT name) where
    toJSON = genericToJSON hasuraJSON
  instance FromJSON $(conT name) where
    parseJSON = genericParseJSON hasuraJSON |])

deriving instance FromJSON ColumnName
deriving instance ToJSON ColumnName

instance FromJSON TableName where
  parseJSON v@(String _) =
    TableName <$> parseJSON v <*> pure "dbo"
  parseJSON (Object o) =
    TableName <$>
    o .: "name" <*>
    o .:? "schema" .!= "dbo"
  parseJSON _ =
    fail "expecting a string/object for TableName"

instance ToJSON TableName where
  toJSON = genericToJSON hasuraJSON

instance ToJSONKey TableName where
  toJSONKey = toJSONKeyText $ \(TableName schema name) -> schema <> "." <> name

deriving newtype instance ToJSONKey ColumnName
instance ToJSONKey ScalarType

deriving newtype instance FromJSONKey ColumnName

instance Arbitrary ColumnName where
  arbitrary = genericArbitrary

instance Arbitrary TableName where
  arbitrary = genericArbitrary

instance ToTxt () where
  toTxt = tshow

--------------------------------------------------------------------------------
-- Manual instances

deriving instance Generic (Countable n)
instance Hashable n => Hashable (Countable n)
instance Cacheable n => Cacheable (Countable n)
deriving instance Eq n => Eq (Countable n)
deriving instance Show n => Show (Countable n)
deriving instance Data n => Data (Countable n)
instance NFData n => NFData (Countable n)
instance ToJSON n => ToJSON (Countable n)
instance FromJSON n => FromJSON (Countable n)

instance Monoid Where where
  mempty = Where mempty

instance Semigroup Where where
  (Where x) <> (Where y) = Where (x <> y)

instance Monoid Top where
  mempty = NoTop

instance Semigroup Top where
  (<>) :: Top -> Top -> Top
  (<>) NoTop x = x
  (<>) x NoTop = x
  (<>) (Top x) (Top y) = Top (min x y)
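Roughly, each Template Haskell splice above expands to a batch of standalone derivations, one per quoted type name; an editor's sketch of the expansion for `Top` from the big list:

-- deriving instance Generic Top
-- instance Hashable Top
-- instance Cacheable Top
-- deriving instance Eq Top
-- deriving instance Show Top
-- deriving instance Data Top
-- instance NFData Top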
server/src-lib/Hasura/Backends/MSSQL/Types/Internal.hs (new file, 354 lines)
@@ -0,0 +1,354 @@
{-# LANGUAGE DuplicateRecordFields #-}
|
||||||
|
|
||||||
|
-- | Types for Transact-SQL aka T-SQL; the language of SQL Server.
|
||||||
|
|
||||||
|
module Hasura.Backends.MSSQL.Types.Internal where
|
||||||
|
|
||||||
|
import Hasura.Prelude
|
||||||
|
|
||||||
|
import qualified Data.Aeson as J
|
||||||
|
import Data.Text.Encoding (encodeUtf8)
|
||||||
|
import qualified Database.ODBC.SQLServer as ODBC
|
||||||
|
import qualified Language.GraphQL.Draft.Syntax as G
|
||||||
|
|
||||||
|
import Hasura.RQL.Types.Error
|
||||||
|
import Hasura.SQL.Backend
|
||||||
|
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
-- Phantom pretend-generic types that are actually specific
|
||||||
|
|
||||||
|
type Column (b :: BackendType) = ColumnName
|
||||||
|
|
||||||
|
type ColumnType (b :: BackendType) = ScalarType
|
||||||
|
|
||||||
|
type Value = ODBC.Value
|
||||||
|
|
||||||
|
--------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
data UnifiedColumn = UnifiedColumn
|
||||||
|
{ name :: !Text
|
||||||
|
, type' :: !ScalarType
|
||||||
|
}
|
||||||
|
|
||||||
|
data UnifiedTableName = UnifiedTableName
|
||||||
|
{ schema :: !Text
|
||||||
|
, name :: !Text
|
||||||
|
}
|
||||||
|
|
||||||
|
data UnifiedObjectRelationship = UnifiedObjectRelationship
|
||||||
|
{ using :: !UnifiedUsing
|
||||||
|
, name :: !Text
|
||||||
|
}
|
||||||
|
|
||||||
|
data UnifiedArrayRelationship = UnifiedArrayRelationship
|
||||||
|
{ using :: !UnifiedUsing
|
||||||
|
, name :: !Text
|
||||||
|
}
|
||||||
|
|
||||||
|
data UnifiedUsing = UnifiedUsing
|
||||||
|
{ foreign_key_constraint_on :: !UnifiedOn
|
||||||
|
}
|
||||||
|
|
||||||
|
data UnifiedOn = UnifiedOn
|
||||||
|
{ table :: !UnifiedTableName
|
||||||
|
, column :: !Text
|
||||||
|
}
|
||||||
|
|
||||||
|
-------------------------------------------------------------------------------
|
||||||
|
-- AST types
|
||||||
|
|
||||||
|
data Select = Select
|
||||||
|
{ selectTop :: !Top
|
||||||
|
, selectProjections :: ![Projection]
|
||||||
|
, selectFrom :: !From
|
||||||
|
, selectJoins :: ![Join]
|
||||||
|
, selectWhere :: !Where
|
||||||
|
, selectFor :: !For
|
||||||
|
, selectOrderBy :: !(Maybe (NonEmpty OrderBy))
|
||||||
|
, selectOffset :: !(Maybe Expression)
|
||||||
|
}
|
||||||
|
|
||||||
|
data Delete = Delete
|
||||||
|
{ deleteTable :: !(Aliased TableName)
|
||||||
|
, deleteWhere :: !Where
|
||||||
|
}
|
||||||
|
|
||||||
|
data Reselect = Reselect
|
||||||
|
{ reselectProjections :: ![Projection]
|
||||||
|
, reselectFor :: !For
|
||||||
|
, reselectWhere :: !Where
|
||||||
|
}
|
||||||
|
|
||||||
|
data OrderBy = OrderBy
|
||||||
|
{ orderByFieldName :: FieldName
|
||||||
|
, orderByOrder :: Order
|
||||||
|
, orderByNullsOrder :: NullsOrder
|
||||||
|
}
|
||||||
|
|
||||||
|
data Order
|
||||||
|
= AscOrder
|
||||||
|
| DescOrder
|
||||||
|
|
||||||
|
data NullsOrder
|
||||||
|
= NullsFirst
|
||||||
|
| NullsLast
|
||||||
|
| NullsAnyOrder
|
||||||
|
|
||||||
|
data For
|
||||||
|
= JsonFor ForJson
|
||||||
|
| NoFor
|
||||||
|
|
||||||
|
data ForJson = ForJson
|
||||||
|
{ jsonCardinality :: JsonCardinality
|
||||||
|
, jsonRoot :: Root
|
||||||
|
}
|
||||||
|
|
||||||
|
data Root
|
||||||
|
= NoRoot
|
||||||
|
| Root Text
|
||||||
|
|
||||||
|
data JsonCardinality
|
||||||
|
= JsonArray
|
||||||
|
| JsonSingleton
|
||||||
|
|
||||||
|
data Projection
|
||||||
|
= ExpressionProjection (Aliased Expression)
|
||||||
|
| FieldNameProjection (Aliased FieldName)
|
||||||
|
| AggregateProjection (Aliased Aggregate)
|
||||||
|
| StarProjection
|
||||||
|
|
||||||
|
data Join = Join
|
||||||
|
{ joinSource :: !JoinSource
|
||||||
|
, joinJoinAlias :: !JoinAlias
|
||||||
|
}
|
||||||
|
|
||||||
|
data JoinSource
|
||||||
|
= JoinSelect Select
|
||||||
|
| JoinReselect Reselect
|
||||||
|
|
||||||
|
|
||||||
|
data JoinAlias = JoinAlias
|
||||||
|
{ joinAliasEntity :: Text
|
||||||
|
, joinAliasField :: Maybe Text
|
||||||
|
}
|
||||||
|
|
||||||
|
newtype Where =
|
||||||
|
Where [Expression]
|
||||||
|
|
||||||
|
data Top
|
||||||
|
= NoTop
|
||||||
|
| Top Int
|
||||||
|
|
||||||
|
data Expression
|
||||||
|
= ValueExpression ODBC.Value
|
||||||
|
| AndExpression [Expression]
|
||||||
|
| OrExpression [Expression]
|
||||||
|
| NotExpression Expression
|
||||||
|
| ExistsExpression Select
|
||||||
|
| SelectExpression Select
|
||||||
|
| IsNullExpression Expression
|
||||||
|
| IsNotNullExpression Expression
|
||||||
|
| ColumnExpression FieldName
|
||||||
|
| EqualExpression Expression Expression
|
||||||
|
| NotEqualExpression Expression Expression
|
||||||
|
| JsonQueryExpression Expression
|
||||||
|
-- ^ This one acts like a "cast to JSON" and makes SQL Server
|
||||||
|
-- behave like it knows your field is JSON and not double-encode
|
||||||
|
-- it.
|
||||||
|
| ToStringExpression Expression
|
||||||
|
| JsonValueExpression Expression JsonPath
|
||||||
|
-- ^ This is for getting actual atomic values out of a JSON
|
||||||
|
-- string.
|
||||||
|
| OpExpression Op Expression Expression
|
||||||
|
|
||||||
|
data JsonPath
|
||||||
|
= RootPath
|
||||||
|
| FieldPath JsonPath Text
|
||||||
|
| IndexPath JsonPath Integer
|
||||||
|
|
||||||
|
data Aggregate
  = CountAggregate (Countable FieldName)
  | OpAggregate !Text [Expression]
  | TextAggregate !Text

data Countable name
  = StarCountable
  | NonNullFieldCountable (NonEmpty name)
  | DistinctCountable (NonEmpty name)

data From
  = FromQualifiedTable (Aliased TableName)
  | FromOpenJson (Aliased OpenJson)

data OpenJson = OpenJson
  { openJsonExpression :: Expression
  , openJsonWith       :: NonEmpty JsonFieldSpec
  }

data JsonFieldSpec
  = IntField Text
  | JsonField Text

data Aliased a = Aliased
  { aliasedThing :: !a
  , aliasedAlias :: !Text
  }

newtype SchemaName = SchemaName
  { schemaNameParts :: [Text]
  }

data TableName = TableName
  { tableName   :: !Text
  , tableSchema :: !Text
  }

type FunctionName = Text -- TODO: improve this type when SQL function support is added to MSSQL

data FieldName = FieldName
  { fieldName       :: Text
  , fieldNameEntity :: !Text
  }

data Comment = DueToPermission | RequestedSingleObject

newtype EntityAlias = EntityAlias
  { entityAliasText :: Text
  }

data Op
  = LessOp
  | LessOrEqualOp
  | MoreOp
  | MoreOrEqualOp
  -- | SIN
  -- | SNE
  -- | SLIKE
  -- | SNLIKE
  -- | SILIKE
  -- | SNILIKE
  -- | SSIMILAR
  -- | SNSIMILAR
  -- | SGTE
  -- | SLTE
  -- | SNIN
  -- | SContains
  -- | SContainedIn
  -- | SHasKey
  -- | SHasKeysAny
  -- | SHasKeysAll

-- | Column name of some database table. This differs from 'FieldName',
-- which is used for referring to things within a query.
newtype ColumnName = ColumnName { columnNameText :: Text }

-- | Derived from the odbc package.
data ScalarType
  = CharType
  | NumericType
  | DecimalType
  | IntegerType
  | SmallintType
  | FloatType
  | RealType
  | DateType
  | Ss_time2Type
  | VarcharType
  | WcharType
  | WvarcharType
  | WtextType
  | TimestampType
  | TextType
  | BinaryType
  | VarbinaryType
  | BigintType
  | TinyintType
  | BitType
  | GuidType
  | UnknownType !Text

scalarTypeDBName :: ScalarType -> Text
scalarTypeDBName = \case
  CharType      -> "char"
  NumericType   -> "numeric"
  DecimalType   -> "decimal"
  IntegerType   -> "int"
  SmallintType  -> "smallint"
  FloatType     -> "float"
  RealType      -> "real"
  DateType      -> "date"
  Ss_time2Type  -> "time"
  VarcharType   -> "varchar"
  WcharType     -> "nchar"
  WvarcharType  -> "nvarchar"
  WtextType     -> "ntext"
  TextType      -> "text"
  TimestampType -> "timestamp"
  BinaryType    -> "binary"
  VarbinaryType -> "varbinary"
  BigintType    -> "bigint"
  TinyintType   -> "tinyint"
  BitType       -> "bit"
  GuidType      -> "uniqueidentifier"
  -- the input form for types that aren't explicitly supported is a string
  UnknownType t -> t

parseScalarValue :: ScalarType -> J.Value -> Either QErr Value
parseScalarValue scalarType jValue = case scalarType of
  CharType      -> ODBC.ByteStringValue . encodeUtf8 <$> parseJValue jValue
  VarcharType   -> ODBC.ByteStringValue . encodeUtf8 <$> parseJValue jValue
  TextType      -> ODBC.ByteStringValue . encodeUtf8 <$> parseJValue jValue
  NumericType   -> ODBC.FloatValue <$> parseJValue jValue
  DecimalType   -> ODBC.FloatValue <$> parseJValue jValue
  IntegerType   -> ODBC.IntValue <$> parseJValue jValue
  SmallintType  -> ODBC.IntValue <$> parseJValue jValue
  FloatType     -> ODBC.FloatValue <$> parseJValue jValue
  RealType      -> ODBC.FloatValue <$> parseJValue jValue
  DateType      -> ODBC.DayValue <$> parseJValue jValue
  Ss_time2Type  -> ODBC.TimeOfDayValue <$> parseJValue jValue
  WcharType     -> ODBC.TextValue <$> parseJValue jValue
  WvarcharType  -> ODBC.TextValue <$> parseJValue jValue
  WtextType     -> ODBC.TextValue <$> parseJValue jValue
  TimestampType -> ODBC.LocalTimeValue <$> parseJValue jValue
  BinaryType    -> ODBC.BinaryValue . ODBC.Binary . txtToBs <$> parseJValue jValue
  VarbinaryType -> ODBC.BinaryValue . ODBC.Binary . txtToBs <$> parseJValue jValue
  BigintType    -> ODBC.IntValue <$> parseJValue jValue
  TinyintType   -> ODBC.IntValue <$> parseJValue jValue
  BitType       -> ODBC.ByteValue <$> parseJValue jValue
  GuidType      -> ODBC.TextValue <$> parseJValue jValue
  -- the input format for types that aren't explicitly supported is a string
  UnknownType _ -> ODBC.TextValue <$> parseJValue jValue
  where
    parseJValue :: (J.FromJSON a) => J.Value -> Either QErr a
    parseJValue = runAesonParser J.parseJSON
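A sketch of the expected behaviour (not part of the commit): a date column
accepts a JSON string in ISO format through the FromJSON instance of Day,
and a value of the wrong shape fails with a QErr.

-- parseScalarValue DateType (J.String "2021-02-23")
--   == Right (ODBC.DayValue 2021-02-23)
-- parseScalarValue DateType (J.Bool True)
--   == Left <aeson parse failure wrapped in a QErr>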
isComparableType, isNumType :: ScalarType -> Bool
isComparableType = \case
  BinaryType    -> False
  VarbinaryType -> False
  BitType       -> False
  GuidType      -> False
  _             -> True
isNumType = \case
  NumericType  -> True
  DecimalType  -> True
  IntegerType  -> True
  SmallintType -> True
  FloatType    -> True
  RealType     -> True
  BigintType   -> True
  TinyintType  -> True
  _            -> False
getGQLTableName :: TableName -> Either QErr G.Name
getGQLTableName tn = do
  let textName = snakeCaseTableName tn
  onNothing (G.mkName textName) $ throw400 ValidationFailed $
    "cannot include " <> textName <> " in the GraphQL schema because it is not a valid GraphQL identifier"

snakeCaseTableName :: TableName -> Text
snakeCaseTableName TableName { tableName, tableSchema } =
  if tableSchema == "dbo"
    then tableName
    else tableSchema <> "_" <> tableName
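So a table in the default "dbo" schema is exposed under its bare name, while
any other schema becomes a prefix (a sketch of the expected behaviour):

-- snakeCaseTableName (TableName "authors" "dbo")  == "authors"
-- snakeCaseTableName (TableName "orders" "sales") == "sales_orders"
-- getGQLTableName then fails only when the resulting text is not a valid
-- GraphQL identifier (for instance, when it starts with a digit).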
@@ -4,8 +4,6 @@ module Hasura.Backends.Postgres.DDL
   )
 where

-import qualified Data.Text as T
-
 import Data.Aeson

 import Hasura.Backends.Postgres.SQL.DML
@@ -50,6 +48,3 @@ mkTypedSessionVar
   -> SessionVariable -> PartialSQLExp 'Postgres
 mkTypedSessionVar columnType =
   PSESessVar (unsafePGColumnToBackend <$> columnType)
-
-isReqUserId :: Text -> Bool
-isReqUserId = (== "req_user_id") . T.toLower
@@ -315,7 +315,7 @@ withMetadataCheck source cascade txAccess action = do
   postActionSchemaCache <- askSchemaCache

   -- Recreate event triggers in hdb_catalog
-  let postActionTables = fromMaybe mempty $ unsafeTableCache source $ scPostgres postActionSchemaCache
+  let postActionTables = fromMaybe mempty $ unsafeTableCache source $ scSources postActionSchemaCache
   serverConfigCtx <- askServerConfigCtx
   liftEitherM $ runPgSourceWriteTx sourceConfig $
     forM_ (M.elems postActionTables) $ \(TableInfo coreInfo _ eventTriggers) -> do
@@ -1,6 +1,8 @@
 module Hasura.Backends.Postgres.DDL.Source
-  (resolveSourceConfig, resolveDatabaseMetadata)
-  where
+  ( resolveSourceConfig
+  , postDropSourceHook
+  , resolveDatabaseMetadata
+  ) where

 import qualified Data.HashMap.Strict as Map
 import qualified Database.PG.Query   as Q
@@ -150,3 +152,27 @@ fetchPgScalars
     SELECT coalesce(json_agg(typname), '[]')
     FROM pg_catalog.pg_type where typtype = 'b'
   |] () True
+
+-- | Clean up the source database after it is dropped in the metadata
+postDropSourceHook
+  :: (MonadIO m, MonadError QErr m, MonadBaseControl IO m)
+  => PGSourceConfig -> m ()
+postDropSourceHook sourceConfig = do
+  -- Clean traces of Hasura in the source database
+  liftEitherM $ runPgSourceWriteTx sourceConfig $ do
+    hdbMetadataTableExist <- doesTableExist "hdb_catalog" "hdb_metadata"
+    eventLogTableExist <- doesTableExist "hdb_catalog" "event_log"
+    -- If both the "hdb_metadata" and "event_log" tables are found in the
+    -- "hdb_catalog" schema, the source is the default postgres source
+    -- (--database-url option); in that case, drop nothing in the catalog schema.
+    if | hdbMetadataTableExist && eventLogTableExist -> pure ()
+       -- Otherwise, if only the "hdb_metadata" table exists, the source is also
+       -- used as metadata storage (--metadata-database-url option); in that
+       -- case, drop only the source-related tables, not the "hdb_catalog" schema.
+       | hdbMetadataTableExist ->
+           Q.multiQE defaultTxErrorHandler $(Q.sqlFromFile "src-rsr/drop_pg_source.sql")
+       -- Otherwise, drop the "hdb_catalog" schema.
+       | otherwise -> dropHdbCatalogSchema
+
+  -- Destroy the postgres source connection
+  liftIO $ _pecDestroyConn $ _pscExecCtx sourceConfig
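The MultiWayIf expression above amounts to the following decision table (a
comment-only summary of the logic in the hunk):

-- hdb_metadata | event_log | action taken on drop
-- -------------+-----------+--------------------------------------------
--   present    |  present  | default source: leave hdb_catalog untouched
--   present    |  absent   | metadata db reused as a source: drop only
--              |           | the source tables (drop_pg_source.sql)
--   absent     |   (any)   | ordinary source: drop the hdb_catalog schema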
@@ -1,6 +1,6 @@
 {-# OPTIONS_GHC -fno-warn-orphans #-}

-module Hasura.GraphQL.Execute.Postgres () where
+module Hasura.Backends.Postgres.Instances.Execute () where

 import Hasura.Prelude

@@ -174,13 +174,13 @@ pgDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig mrf =
     remoteJoinCtx = (manager, reqHeaders, userInfo)


--- mutation
+-- subscription

 pgDBSubscriptionPlan
-  :: forall m
-   . ( MonadError QErr m
+  :: forall m.
+    ( MonadError QErr m
     , MonadIO m
     )
   => UserInfo
   -> SourceConfig 'Postgres
   -> InsOrdHashMap G.Name (QueryDB 'Postgres (UnpreparedValue 'Postgres))
server/src-lib/Hasura/Backends/Postgres/Instances/Schema.hs (new file, 559 lines)
@@ -0,0 +1,559 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.Postgres.Instances.Schema () where

import Hasura.Prelude

import qualified Data.Aeson                          as J
import qualified Data.HashMap.Strict                 as Map
import qualified Data.HashMap.Strict.Extended        as M
import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
import qualified Data.List.NonEmpty                  as NE
import qualified Data.Text                           as T
import qualified Database.PG.Query                   as Q
import qualified Language.GraphQL.Draft.Syntax       as G

import Data.Parser.JSONPath
import Data.Text.Extended

import qualified Hasura.GraphQL.Parser         as P
import qualified Hasura.GraphQL.Schema.Backend as BS
import qualified Hasura.GraphQL.Schema.Build   as GSB
import qualified Hasura.RQL.IR.Select          as IR
import qualified Hasura.RQL.IR.Update          as IR

import Hasura.Backends.Postgres.SQL.DML   as PG hiding (CountType)
import Hasura.Backends.Postgres.SQL.Types as PG hiding (FunctionName, TableName)
import Hasura.Backends.Postgres.SQL.Value as PG
import Hasura.GraphQL.Context
import Hasura.GraphQL.Parser hiding (EnumValueInfo, field)
import Hasura.GraphQL.Parser.Internal.Parser hiding (field)
import Hasura.GraphQL.Schema.Backend (BackendSchema, ComparisonExp, MonadBuildSchema)
import Hasura.GraphQL.Schema.Common
import Hasura.GraphQL.Schema.Select
import Hasura.GraphQL.Schema.Table
import Hasura.RQL.Types
import Hasura.SQL.Types


----------------------------------------------------------------
-- BackendSchema instance

instance BackendSchema 'Postgres where
  -- top level parsers
  buildTableQueryFields          = GSB.buildTableQueryFields
  buildTableRelayQueryFields     = buildTableRelayQueryFields
  buildTableInsertMutationFields = GSB.buildTableInsertMutationFields
  buildTableUpdateMutationFields = GSB.buildTableUpdateMutationFields
  buildTableDeleteMutationFields = GSB.buildTableDeleteMutationFields
  buildFunctionQueryFields       = GSB.buildFunctionQueryFields
  buildFunctionRelayQueryFields  = buildFunctionRelayQueryFields
  buildFunctionMutationFields    = GSB.buildFunctionMutationFields
  -- backend extensions
  relayExtension    = const $ Just ()
  nodesAggExtension = const $ Just ()
  -- individual components
  columnParser              = columnParser
  jsonPathArg               = jsonPathArg
  orderByOperators          = orderByOperators
  comparisonExps            = comparisonExps
  updateOperators           = updateOperators
  offsetParser              = offsetParser
  mkCountType               = mkCountType
  aggregateOrderByCountType = PG.PGInteger
  computedField             = computedFieldPG
  node                      = nodePG
  tableDistinctOn           = tableDistinctOn
  remoteRelationshipField   = remoteRelationshipFieldPG
  -- SQL literals
  columnDefaultValue = const PG.columnDefaultValue

----------------------------------------------------------------
-- Top level parsers

buildTableRelayQueryFields
  :: MonadBuildSchema 'Postgres r m n
  => SourceName
  -> SourceConfig 'Postgres
  -> TableName 'Postgres
  -> TableInfo 'Postgres
  -> G.Name
  -> NESeq (ColumnInfo 'Postgres)
  -> SelPermInfo 'Postgres
  -> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
buildTableRelayQueryFields sourceName sourceInfo tableName tableInfo gqlName pkeyColumns selPerms = do
  let
    mkRF = RFDB sourceName sourceInfo . QDBR
    fieldName = gqlName <> $$(G.litName "_connection")
    fieldDesc = Just $ G.Description $ "fetch data from the table: " <>> tableName
  optionalFieldParser (mkRF . QDBConnection) $ selectTableConnection tableName fieldName fieldDesc pkeyColumns selPerms

buildFunctionRelayQueryFields
  :: MonadBuildSchema 'Postgres r m n
  => SourceName
  -> SourceConfig 'Postgres
  -> FunctionName 'Postgres
  -> FunctionInfo 'Postgres
  -> TableName 'Postgres
  -> NESeq (ColumnInfo 'Postgres)
  -> SelPermInfo 'Postgres
  -> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
buildFunctionRelayQueryFields sourceName sourceInfo functionName functionInfo tableName pkeyColumns selPerms = do
  funcName <- functionGraphQLName @'Postgres functionName `onLeft` throwError
  let
    mkRF = RFDB sourceName sourceInfo . QDBR
    fieldName = funcName <> $$(G.litName "_connection")
    fieldDesc = Just $ G.Description $ "execute function " <> functionName <<> " which returns " <>> tableName
  optionalFieldParser (mkRF . QDBConnection) $ selectFunctionConnection functionInfo fieldName fieldDesc pkeyColumns selPerms

----------------------------------------------------------------
-- Individual components

columnParser
  :: (MonadSchema n m, MonadError QErr m)
  => ColumnType 'Postgres
  -> G.Nullability
  -> m (Parser 'Both n (Opaque (ColumnValue 'Postgres)))
columnParser columnType (G.Nullability isNullable) =
  -- TODO(PDV): It might be worth memoizing this function even though it isn’t
  -- recursive simply for performance reasons, since it’s likely to be hammered
  -- during schema generation. Need to profile to see whether or not it’s a win.
  opaque . fmap (ColumnValue columnType) <$> case columnType of
    ColumnScalar scalarType -> possiblyNullable scalarType <$> case scalarType of
      PGInteger -> pure (PGValInteger <$> P.int)
      PGBoolean -> pure (PGValBoolean <$> P.boolean)
      PGFloat   -> pure (PGValDouble <$> P.float)
      PGText    -> pure (PGValText <$> P.string)
      PGVarchar -> pure (PGValVarchar <$> P.string)
      PGJSON    -> pure (PGValJSON . Q.JSON <$> P.json)
      PGJSONB   -> pure (PGValJSONB . Q.JSONB <$> P.jsonb)
      -- For all other scalars, we convert the value to JSON and use the
      -- FromJSON instance. The major upside is that this avoids having to write
      -- new parsers for each custom type: if the JSON parser is sound, so will
      -- this one, and it avoids the risk of having two separate ways of parsing
      -- a value in the codebase, which could lead to inconsistencies.
      _ -> do
        name <- mkScalarTypeName scalarType
        let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
        pure $ Parser
          { pType = schemaType
          , pParser =
              valueToJSON (P.toGraphQLType schemaType) >=>
              either (parseErrorWith ParseFailed . qeError) pure . runAesonParser (parsePGValue scalarType)
          }
    ColumnEnumReference (EnumReference tableName enumValues) ->
      case nonEmpty (Map.toList enumValues) of
        Just enumValuesList -> do
          name <- qualifiedObjectToName tableName <&> (<> $$(G.litName "_enum"))
          pure $ possiblyNullable PGText $ P.enum name Nothing (mkEnumValue <$> enumValuesList)
        Nothing -> throw400 ValidationFailed "empty enum values"
  where
    -- Sadly, this combinator is not sound in general, so we can’t export it
    -- for general-purpose use. If we did, someone could write this:
    --
    --   mkParameter <$> opaque do
    --     n <- int
    --     pure (mkIntColumnValue (n + 1))
    --
    -- Now we’d end up with a UVParameter that has a variable in it, so we’d
    -- parameterize over it. But when we’d reuse the plan, we wouldn’t know to
    -- increment the value by 1, so we’d use the wrong value!
    --
    -- We could theoretically solve this by retaining a reference to the parser
    -- itself and re-parsing each new value, using the saved parser, which
    -- would admittedly be neat. But it’s more complicated, and it isn’t clear
    -- that it would actually be useful, so for now we don’t support it.
    opaque :: MonadParse m => Parser 'Both m a -> Parser 'Both m (Opaque a)
    opaque parser = parser
      { pParser = \case
          P.GraphQLValue (G.VVariable var@Variable{ vInfo, vValue }) -> do
            typeCheck False (P.toGraphQLType $ pType parser) var
            P.mkOpaque (Just vInfo) <$> pParser parser (absurd <$> vValue)
          value -> P.mkOpaque Nothing <$> pParser parser value
      }
    possiblyNullable scalarType
      | isNullable = fmap (fromMaybe $ PGNull scalarType) . P.nullable
      | otherwise  = id
    mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, PGScalarValue)
    mkEnumValue (EnumValue value, EnumValueInfo description) =
      ( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo
      , PGValText $ G.unName value
      )
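For instance (a sketch with hypothetical values), a parser built for
ColumnScalar PGInteger accepts a GraphQL Int literal and wraps the result:

-- p <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
-- parsing the literal 42 with p produces, conceptually,
--   Opaque (ColumnValue (ColumnScalar PGInteger) (PGValInteger 42))
-- while a nullable column additionally maps an explicit null to PGNull.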

jsonPathArg
  :: MonadParse n
  => ColumnType 'Postgres
  -> InputFieldsParser n (Maybe (IR.ColumnOp 'Postgres))
jsonPathArg columnType
  | isScalarColumnWhere PG.isJSONType columnType =
      P.fieldOptional fieldName description P.string `P.bindFields` fmap join . traverse toColExp
  | otherwise = pure Nothing
  where
    fieldName = $$(G.litName "path")
    description = Just "JSON select path"
    toColExp textValue = case parseJSONPath textValue of
      Left err     -> parseError $ T.pack $ "parse json path error: " ++ err
      Right []     -> pure Nothing
      Right jPaths -> pure $ Just $ IR.ColumnOp PG.jsonbPathOp $ PG.SEArray $ map elToColExp jPaths
    elToColExp (Key k)   = PG.SELit k
    elToColExp (Index i) = PG.SELit $ tshow i

orderByOperators
  :: NonEmpty (Definition P.EnumValueInfo, (BasicOrderType 'Postgres, NullsOrderType 'Postgres))
orderByOperators = NE.fromList
  [ ( define $$(G.litName "asc") "in ascending order, nulls last"
    , (PG.OTAsc, PG.NLast)
    )
  , ( define $$(G.litName "asc_nulls_first") "in ascending order, nulls first"
    , (PG.OTAsc, PG.NFirst)
    )
  , ( define $$(G.litName "asc_nulls_last") "in ascending order, nulls last"
    , (PG.OTAsc, PG.NLast)
    )
  , ( define $$(G.litName "desc") "in descending order, nulls first"
    , (PG.OTDesc, PG.NFirst)
    )
  , ( define $$(G.litName "desc_nulls_first") "in descending order, nulls first"
    , (PG.OTDesc, PG.NFirst)
    )
  , ( define $$(G.litName "desc_nulls_last") "in descending order, nulls last"
    , (PG.OTDesc, PG.NLast)
    )
  ]
  where
    define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo

comparisonExps
  :: forall m n. (BackendSchema 'Postgres, MonadSchema n m, MonadError QErr m)
  => ColumnType 'Postgres -> m (Parser 'Input n [ComparisonExp 'Postgres])
comparisonExps = P.memoize 'comparisonExps \columnType -> do
  geogInputParser <- geographyWithinDistanceInput
  geomInputParser <- geometryWithinDistanceInput
  ignInputParser  <- intersectsGeomNbandInput
  ingInputParser  <- intersectsNbandGeomInput
  -- see Note [Columns in comparison expression are never nullable]
  typedParser        <- columnParser columnType (G.Nullability False)
  nullableTextParser <- columnParser (ColumnScalar PGText) (G.Nullability True)
  textParser         <- columnParser (ColumnScalar PGText) (G.Nullability False)
  maybeCastParser    <- castExp columnType
  let name = P.getName typedParser <> $$(G.litName "_comparison_exp")
      desc = G.Description $ "Boolean expression to compare columns of type "
        <> P.getName typedParser
        <<> ". All fields are combined with logical 'AND'."
      textListParser   = P.list textParser  `P.bind` traverse P.openOpaque
      columnListParser = P.list typedParser `P.bind` traverse P.openOpaque
  pure $ P.object name (Just desc) $ fmap catMaybes $ sequenceA $ concat
    [ flip (maybe []) maybeCastParser $ \castParser ->
      [ P.fieldOptional $$(G.litName "_cast") Nothing (ACast <$> castParser)
      ]
    -- Common ops for all types
    , [ P.fieldOptional $$(G.litName "_is_null") Nothing (bool ANISNOTNULL ANISNULL <$> P.boolean)
      , P.fieldOptional $$(G.litName "_eq")  Nothing (AEQ True . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_neq") Nothing (ANE True . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_in")  Nothing (AIN  . mkListLiteral columnType <$> columnListParser)
      , P.fieldOptional $$(G.litName "_nin") Nothing (ANIN . mkListLiteral columnType <$> columnListParser)
      ]
    -- Comparison ops for non Raster types
    , guard (isScalarColumnWhere (/= PGRaster) columnType) *>
      [ P.fieldOptional $$(G.litName "_gt")  Nothing (AGT  . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_lt")  Nothing (ALT  . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_gte") Nothing (AGTE . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_lte") Nothing (ALTE . mkParameter <$> typedParser)
      ]
    -- Ops for Raster types
    , guard (isScalarColumnWhere (== PGRaster) columnType) *>
      [ P.fieldOptional $$(G.litName "_st_intersects_rast")
          Nothing
          (ASTIntersectsRast . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_intersects_nband_geom")
          Nothing
          (ASTIntersectsNbandGeom <$> ingInputParser)
      , P.fieldOptional $$(G.litName "_st_intersects_geom_nband")
          Nothing
          (ASTIntersectsGeomNband <$> ignInputParser)
      ]
    -- Ops for String like types
    , guard (isScalarColumnWhere isStringType columnType) *>
      [ P.fieldOptional $$(G.litName "_like")
          (Just "does the column match the given pattern")
          (ALIKE . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_nlike")
          (Just "does the column NOT match the given pattern")
          (ANLIKE . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_ilike")
          (Just "does the column match the given case-insensitive pattern")
          (AILIKE () . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_nilike")
          (Just "does the column NOT match the given case-insensitive pattern")
          (ANILIKE () . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_similar")
          (Just "does the column match the given SQL regular expression")
          (ASIMILAR . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_nsimilar")
          (Just "does the column NOT match the given SQL regular expression")
          (ANSIMILAR . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_regex")
          (Just "does the column match the given POSIX regular expression, case sensitive")
          (AREGEX . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_iregex")
          (Just "does the column match the given POSIX regular expression, case insensitive")
          (AIREGEX . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_nregex")
          (Just "does the column NOT match the given POSIX regular expression, case sensitive")
          (ANREGEX . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_niregex")
          (Just "does the column NOT match the given POSIX regular expression, case insensitive")
          (ANIREGEX . mkParameter <$> typedParser)
      ]
    -- Ops for JSONB type
    , guard (isScalarColumnWhere (== PGJSONB) columnType) *>
      [ P.fieldOptional $$(G.litName "_contains")
          (Just "does the column contain the given json value at the top level")
          (AContains . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_contained_in")
          (Just "is the column contained in the given json value")
          (AContainedIn . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_has_key")
          (Just "does the string exist as a top-level key in the column")
          (AHasKey . mkParameter <$> nullableTextParser)
      , P.fieldOptional $$(G.litName "_has_keys_any")
          (Just "do any of these strings exist as top-level keys in the column")
          (AHasKeysAny . mkListLiteral (ColumnScalar PGText) <$> textListParser)
      , P.fieldOptional $$(G.litName "_has_keys_all")
          (Just "do all of these strings exist as top-level keys in the column")
          (AHasKeysAll . mkListLiteral (ColumnScalar PGText) <$> textListParser)
      ]
    -- Ops for Geography type
    , guard (isScalarColumnWhere (== PGGeography) columnType) *>
      [ P.fieldOptional $$(G.litName "_st_intersects")
          (Just "does the column spatially intersect the given geography value")
          (ASTIntersects . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_d_within")
          (Just "is the column within a given distance from the given geography value")
          (ASTDWithinGeog <$> geogInputParser)
      ]
    -- Ops for Geometry type
    , guard (isScalarColumnWhere (== PGGeometry) columnType) *>
      [ P.fieldOptional $$(G.litName "_st_contains")
          (Just "does the column contain the given geometry value")
          (ASTContains . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_crosses")
          (Just "does the column cross the given geometry value")
          (ASTCrosses . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_equals")
          (Just "is the column equal to given geometry value (directionality is ignored)")
          (ASTEquals . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_overlaps")
          (Just "does the column 'spatially overlap' (intersect but not completely contain) the given geometry value")
          (ASTOverlaps . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_touches")
          (Just "does the column have at least one point in common with the given geometry value")
          (ASTTouches . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_within")
          (Just "is the column contained in the given geometry value")
          (ASTWithin . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_intersects")
          (Just "does the column spatially intersect the given geometry value")
          (ASTIntersects . mkParameter <$> typedParser)
      , P.fieldOptional $$(G.litName "_st_d_within")
          (Just "is the column within a given distance from the given geometry value")
          (ASTDWithinGeom <$> geomInputParser)
      ]
    ]
  where
    mkListLiteral :: ColumnType 'Postgres -> [ColumnValue 'Postgres] -> UnpreparedValue 'Postgres
    mkListLiteral columnType columnValues = P.UVLiteral $ SETyAnn
      (SEArray $ txtEncoder . cvValue <$> columnValues)
      (mkTypeAnn $ CollectableTypeArray $ unsafePGColumnToBackend columnType)

    castExp :: ColumnType 'Postgres -> m (Maybe (Parser 'Input n (CastExp 'Postgres (UnpreparedValue 'Postgres))))
    castExp sourceType = do
      let maybeScalars = case sourceType of
            ColumnScalar PGGeography -> Just (PGGeography, PGGeometry)
            ColumnScalar PGGeometry  -> Just (PGGeometry, PGGeography)
            _                        -> Nothing

      forM maybeScalars $ \(sourceScalar, targetScalar) -> do
        sourceName   <- mkScalarTypeName sourceScalar <&> (<> $$(G.litName "_cast_exp"))
        targetName   <- mkScalarTypeName targetScalar
        targetOpExps <- comparisonExps $ ColumnScalar targetScalar
        let field = P.fieldOptional targetName Nothing $ (targetScalar, ) <$> targetOpExps
        pure $ P.object sourceName Nothing $ M.fromList . maybeToList <$> field

geographyWithinDistanceInput
  :: forall m n. (MonadSchema n m, MonadError QErr m)
  => m (Parser 'Input n (DWithinGeogOp (UnpreparedValue 'Postgres)))
geographyWithinDistanceInput = do
  geographyParser <- columnParser (ColumnScalar PGGeography) (G.Nullability False)
  -- FIXME
  -- It doesn't make sense for this value to be nullable; it is only so for
  -- backwards compatibility. If an explicit null value is given, it will be
  -- forwarded to the underlying SQL function, which in turn treats a null
  -- value as an error. We can fix this by rejecting explicit null values,
  -- i.e. by marking this field non-nullable in a future release.
  booleanParser <- columnParser (ColumnScalar PGBoolean) (G.Nullability True)
  floatParser   <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
  pure $ P.object $$(G.litName "st_d_within_geography_input") Nothing $
    DWithinGeogOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
                  <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geographyParser)
                  <*> (mkParameter <$> P.fieldWithDefault $$(G.litName "use_spheroid") Nothing (G.VBoolean True) booleanParser)

geometryWithinDistanceInput
  :: forall m n. (MonadSchema n m, MonadError QErr m)
  => m (Parser 'Input n (DWithinGeomOp (UnpreparedValue 'Postgres)))
geometryWithinDistanceInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  floatParser    <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
  pure $ P.object $$(G.litName "st_d_within_input") Nothing $
    DWithinGeomOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
                  <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geometryParser)

intersectsNbandGeomInput
  :: forall m n. (MonadSchema n m, MonadError QErr m)
  => m (Parser 'Input n (STIntersectsNbandGeommin (UnpreparedValue 'Postgres)))
intersectsNbandGeomInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  integerParser  <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
  pure $ P.object $$(G.litName "st_intersects_nband_geom_input") Nothing $
    STIntersectsNbandGeommin <$> (mkParameter <$> P.field $$(G.litName "nband") Nothing integerParser)
                             <*> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)

intersectsGeomNbandInput
  :: forall m n. (MonadSchema n m, MonadError QErr m)
  => m (Parser 'Input n (STIntersectsGeomminNband (UnpreparedValue 'Postgres)))
intersectsGeomNbandInput = do
  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
  integerParser  <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
  pure $ P.object $$(G.litName "st_intersects_geom_nband_input") Nothing $ STIntersectsGeomminNband
    <$> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
    <*> (fmap mkParameter <$> P.fieldOptional $$(G.litName "nband") Nothing integerParser)

offsetParser :: MonadParse n => Parser 'Both n (SQLExpression 'Postgres)
offsetParser = PG.txtEncoder <$> Parser
  { pType = fakeBigIntSchemaType
  , pParser = peelVariable (Just $ P.toGraphQLType fakeBigIntSchemaType) >=> \case
      P.GraphQLValue (G.VInt i)    -> PG.PGValBigInt <$> convertWith PG.scientificToInteger (fromInteger i)
      P.JSONValue (J.Number n)     -> PG.PGValBigInt <$> convertWith PG.scientificToInteger n
      P.GraphQLValue (G.VString s) -> pure $ PG.PGValUnknown s
      P.JSONValue (J.String s)     -> pure $ PG.PGValUnknown s
      v -> typeMismatch $$(G.litName "Int") "a 32-bit integer, or a 64-bit integer represented as a string" v
  }
  where
    fakeBigIntSchemaType = P.NonNullable $ P.TNamed $ P.mkDefinition $$(G.litName "Int") Nothing P.TIScalar
    convertWith f = either (parseErrorWith ParseFailed . qeError) pure . runAesonParser f
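In practice both offset: 10 and offset: "9223372036854775807" are accepted
(a sketch of the mapping, not part of the commit):

-- GraphQL Int literal 10         ~> PG.PGValBigInt 10
-- JSON number 10                 ~> PG.PGValBigInt 10
-- string "9223372036854775807"   ~> PG.PGValUnknown, passed through verbatim
-- anything else                  ~> type mismatch error against Int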

mkCountType :: Maybe Bool -> Maybe [Column 'Postgres] -> CountType 'Postgres
mkCountType _           Nothing     = PG.CTStar
mkCountType (Just True) (Just cols) = PG.CTDistinct cols
mkCountType _           (Just cols) = PG.CTSimple cols
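So the distinct flag only matters when a column list is present (a sketch,
with c standing for some column):

-- mkCountType Nothing      Nothing    == PG.CTStar         -- count(*)
-- mkCountType (Just True)  (Just [c]) == PG.CTDistinct [c] -- count(distinct c)
-- mkCountType (Just False) (Just [c]) == PG.CTSimple [c]   -- count(c)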

-- | Argument to distinct select on columns returned from table selection
-- > distinct_on: [table_select_column!]
tableDistinctOn
  :: forall m n r. (MonadSchema n m, MonadTableInfo r m, MonadRole r m)
  => TableName 'Postgres
  -> SelPermInfo 'Postgres
  -> m (InputFieldsParser n (Maybe (XDistinct 'Postgres, NonEmpty (Column 'Postgres))))
tableDistinctOn table selectPermissions = do
  columnsEnum <- tableSelectColumnsEnum table selectPermissions
  pure $ do
    maybeDistinctOnColumns <- join . join <$> for columnsEnum
      (P.fieldOptional distinctOnName distinctOnDesc . P.nullable . P.list)
    pure $ maybeDistinctOnColumns >>= NE.nonEmpty <&> ((),)
  where
    distinctOnName = $$(G.litName "distinct_on")
    distinctOnDesc = Just $ G.Description "distinct select on columns"

-- | Various update operators
updateOperators
  :: forall m n r. (MonadSchema n m, MonadTableInfo r m)
  => QualifiedTable         -- ^ qualified name of the table
  -> UpdPermInfo 'Postgres  -- ^ update permissions of the table
  -> m (Maybe (InputFieldsParser n [(Column 'Postgres, IR.UpdOpExpG (UnpreparedValue 'Postgres))]))
updateOperators table updatePermissions = do
  tableGQLName <- getTableGQLName @'Postgres table
  columns      <- tableUpdateColumns table updatePermissions
  let numericCols = onlyNumCols columns
      jsonCols    = onlyJSONBCols columns
  parsers <- catMaybes <$> sequenceA
    [ updateOperator tableGQLName $$(G.litName "_set")
        typedParser IR.UpdSet columns
        "sets the columns of the filtered rows to the given values"
        (G.Description $ "input type for updating data in table " <>> table)

    , updateOperator tableGQLName $$(G.litName "_inc")
        typedParser IR.UpdInc numericCols
        "increments the numeric columns with given value of the filtered values"
        (G.Description $ "input type for incrementing numeric columns in table " <>> table)

    , let desc = "prepend existing jsonb value of filtered columns with new jsonb value"
      in updateOperator tableGQLName $$(G.litName "_prepend")
           typedParser IR.UpdPrepend jsonCols desc desc

    , let desc = "append existing jsonb value of filtered columns with new jsonb value"
      in updateOperator tableGQLName $$(G.litName "_append")
           typedParser IR.UpdAppend jsonCols desc desc

    , let desc = "delete key/value pair or string element. key/value pairs are matched based on their key value"
      in updateOperator tableGQLName $$(G.litName "_delete_key")
           nullableTextParser IR.UpdDeleteKey jsonCols desc desc

    , let desc = "delete the array element with specified index (negative integers count from the end). "
                 <> "throws an error if top level container is not an array"
      in updateOperator tableGQLName $$(G.litName "_delete_elem")
           nonNullableIntParser IR.UpdDeleteElem jsonCols desc desc

    , let desc = "delete the field or element with specified path (for JSON arrays, negative integers count from the end)"
      in updateOperator tableGQLName $$(G.litName "_delete_at_path")
           (fmap P.list . nonNullableTextParser) IR.UpdDeleteAtPath jsonCols desc desc
    ]
  whenMaybe (not $ null parsers) do
    let allowedOperators = fst <$> parsers
    pure $ fmap catMaybes (sequenceA $ snd <$> parsers)
      `P.bindFields` \opExps -> do
        -- there needs to be at least one operator in the update, even if it is empty
        let presetColumns = Map.toList $ IR.UpdSet . partialSQLExpToUnpreparedValue <$> upiSet updatePermissions
        when (null opExps && null presetColumns) $ parseError $
          "at least any one of " <> commaSeparated allowedOperators <> " is expected"

        -- no column should appear twice
        let flattenedExps = concat opExps
            erroneousExps = OMap.filter ((> 1) . length) $ OMap.groupTuples flattenedExps
        unless (OMap.null erroneousExps) $ parseError $
          "column found in multiple operators; " <>
          T.intercalate ". " [ dquote columnName <> " in " <> commaSeparated (IR.updateOperatorText <$> ops)
                             | (columnName, ops) <- OMap.toList erroneousExps
                             ]

        pure $ presetColumns <> flattenedExps
  where
    typedParser columnInfo  = fmap P.mkParameter <$> columnParser (pgiType columnInfo) (G.Nullability $ pgiIsNullable columnInfo)
    nonNullableTextParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGText)    (G.Nullability False)
    nullableTextParser    _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGText)    (G.Nullability True)
    nonNullableIntParser  _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGInteger) (G.Nullability False)

    updateOperator
      :: G.Name
      -> G.Name
      -> (ColumnInfo b -> m (Parser 'Both n a))
      -> (a -> IR.UpdOpExpG (UnpreparedValue b))
      -> [ColumnInfo b]
      -> G.Description
      -> G.Description
      -> m (Maybe (Text, InputFieldsParser n (Maybe [(Column b, IR.UpdOpExpG (UnpreparedValue b))])))
    updateOperator tableGQLName opName mkParser updOpExp columns opDesc objDesc =
      whenMaybe (not $ null columns) do
        fields <- for columns \columnInfo -> do
          let fieldName = pgiName columnInfo
              fieldDesc = pgiDescription columnInfo
          fieldParser <- mkParser columnInfo
          pure $ P.fieldOptional fieldName fieldDesc fieldParser
            `mapField` \value -> (pgiColumn columnInfo, updOpExp value)
        let objName = tableGQLName <> opName <> $$(G.litName "_input")
        pure $ (G.unName opName,)
          $ P.fieldOptional opName (Just opDesc)
          $ P.object objName (Just objDesc)
          $ catMaybes <$> sequenceA fields
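For a table named article, the operators above generate a GraphQL surface
roughly like the following (an illustrative sketch; the column names are
hypothetical):

-- input article_set_input { title: String, rating: Int, ... }
-- input article_inc_input { rating: Int, ... }
-- these appear as the _set, _inc, ... arguments of the update_article
-- mutation, and at least one of them must be supplied.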
@@ -1,6 +1,6 @@
 {-# OPTIONS_GHC -fno-warn-orphans #-}

-module Hasura.GraphQL.Transport.Postgres () where
+module Hasura.Backends.Postgres.Instances.Transport () where

 import Hasura.Prelude

@@ -19,7 +19,6 @@ import qualified Hasura.Tracing as Tracing
 import Hasura.EncJSON
 import Hasura.GraphQL.Execute.Backend
 import Hasura.GraphQL.Execute.LiveQuery.Plan
-import Hasura.GraphQL.Execute.Postgres ()
 import Hasura.GraphQL.Logging (MonadQueryLog (..))
 import Hasura.GraphQL.Transport.Backend
 import Hasura.GraphQL.Transport.HTTP.Protocol
server/src-lib/Hasura/Backends/Postgres/Instances/Types.hs (new file, 59 lines)
@@ -0,0 +1,59 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}

module Hasura.Backends.Postgres.Instances.Types where

import Hasura.Prelude

import qualified Hasura.Backends.Postgres.Connection as PG
import qualified Hasura.Backends.Postgres.SQL.DML    as PG
import qualified Hasura.Backends.Postgres.SQL.Types  as PG
import qualified Hasura.Backends.Postgres.SQL.Value  as PG

import Hasura.RQL.DDL.Headers ()
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Error
import Hasura.SQL.Backend


instance Backend 'Postgres where
  type SourceConfig            'Postgres = PG.PGSourceConfig
  type SourceConnConfiguration 'Postgres = PG.PostgresConnConfiguration
  type Identifier              'Postgres = PG.Identifier
  type Alias                   'Postgres = PG.Alias
  type TableName               'Postgres = PG.QualifiedTable
  type FunctionName            'Postgres = PG.QualifiedFunction
  type FunctionArgType         'Postgres = PG.QualifiedPGType
  type ConstraintName          'Postgres = PG.ConstraintName
  type BasicOrderType          'Postgres = PG.OrderType
  type NullsOrderType          'Postgres = PG.NullsOrder
  type CountType               'Postgres = PG.CountType
  type Column                  'Postgres = PG.PGCol
  type ScalarValue             'Postgres = PG.PGScalarValue
  type ScalarType              'Postgres = PG.PGScalarType
  type SQLExpression           'Postgres = PG.SQLExp
  type SQLOperator             'Postgres = PG.SQLOp
  type XAILIKE                 'Postgres = ()
  type XANILIKE                'Postgres = ()
  type XComputedField          'Postgres = ()
  type XRemoteField            'Postgres = ()
  type XEventTrigger           'Postgres = ()
  type XRelay                  'Postgres = ()
  type XNodesAgg               'Postgres = ()
  type XDistinct               'Postgres = ()

  backendTag              = PostgresTag
  functionArgScalarType   = PG._qptName
  isComparableType        = PG.isComparableType
  isNumType               = PG.isNumType
  textToScalarValue       = maybe (PG.PGNull PG.PGText) PG.PGValText
  parseScalarValue ty val = runAesonParser (PG.parsePGValue ty) val
  scalarValueToJSON       = PG.pgScalarValueToJson
  functionToTable         = fmap (PG.TableName . PG.getFunctionTxt)
  tableToFunction         = fmap (PG.FunctionName . PG.getTableTxt)

  tableGraphQLName      = PG.qualifiedObjectToName
  functionGraphQLName   = PG.qualifiedObjectToName
  scalarTypeGraphQLName = runExcept . mkScalarTypeName

  snakeCaseTableName = PG.snakeCaseQualifiedObject
@@ -198,7 +198,7 @@ processEventQueue logger logenv httpMgr getSchemaCache eeCtx@EventEngineCtx{..}
     Any serial order of updates to a row will lead to an eventually consistent state as the row will have
     (delivered=t or error=t or archived=t) after a fixed number of tries (assuming it begins with locked='f').
   -}
-  pgSources <- scPostgres <$> liftIO getSchemaCache
+  pgSources <- scSources <$> liftIO getSchemaCache
   fmap concat $ forM (M.toList pgSources) $ \(sourceName, sourceCache) ->
     case unsafeSourceConfiguration @'Postgres sourceCache of
       Nothing -> pure []
|
|||||||
:: SchemaCache -> Event -> Either Text (EventTriggerInfo 'Postgres)
|
:: SchemaCache -> Event -> Either Text (EventTriggerInfo 'Postgres)
|
||||||
getEventTriggerInfoFromEvent sc e = do
|
getEventTriggerInfoFromEvent sc e = do
|
||||||
let table = eTable e
|
let table = eTable e
|
||||||
mTableInfo = unsafeTableInfo @'Postgres (eSource e) table $ scPostgres sc
|
mTableInfo = unsafeTableInfo @'Postgres (eSource e) table $ scSources sc
|
||||||
tableInfo <- onNothing mTableInfo $ Left ("table '" <> table <<> "' not found")
|
tableInfo <- onNothing mTableInfo $ Left ("table '" <> table <<> "' not found")
|
||||||
let triggerName = tmName $ eTrigger e
|
let triggerName = tmName $ eTrigger e
|
||||||
mEventTriggerInfo = M.lookup triggerName (_tiEventTriggerInfoMap tableInfo)
|
mEventTriggerInfo = M.lookup triggerName (_tiEventTriggerInfoMap tableInfo)
|
||||||
|
@@ -16,6 +16,7 @@ module Hasura.GraphQL.Context
   , SubscriptionRootField
   , QueryDBRoot(..)
   , MutationDBRoot(..)
+  , traverseQueryDB
   , traverseActionQuery
   ) where

@@ -120,6 +121,18 @@ type MutationRootField v = RootField (MutationDBRoot v) RemoteField (ActionM
 type SubscriptionRootField v = RootField (QueryDBRoot v) Void Void Void


+traverseQueryDB
+  :: forall f a b backend
+   . Applicative f
+  => (a -> f b)
+  -> QueryDB backend a
+  -> f (QueryDB backend b)
+traverseQueryDB f = \case
+  QDBMultipleRows s -> QDBMultipleRows <$> IR.traverseAnnSimpleSelect f s
+  QDBSingleRow    s -> QDBSingleRow    <$> IR.traverseAnnSimpleSelect f s
+  QDBAggregation  s -> QDBAggregation  <$> IR.traverseAnnAggregateSelect f s
+  QDBConnection   s -> QDBConnection   <$> IR.traverseConnectionSelect f s
+
 traverseActionQuery
   :: Applicative f
   => (a -> f b)
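A sketch of the intended use of traverseQueryDB: replacing every leaf value
of a query with its resolved form under some Applicative effect (the name
prepareValue is hypothetical):

-- preparedQuery <- traverseQueryDB prepareValue unpreparedQuery
-- for any prepareValue :: a -> f b with Applicative f; the shape of the
-- query is preserved and only the values change.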
@@ -40,7 +40,6 @@ import qualified Hasura.Logging as L
 import qualified Hasura.Server.Telemetry.Counters as Telem
 import qualified Hasura.Tracing as Tracing

-import Hasura.GraphQL.Execute.Postgres ()
 import Hasura.GraphQL.Parser.Column (UnpreparedValue)
 import Hasura.GraphQL.RemoteServer (execRemoteGQ)
 import Hasura.GraphQL.Transport.HTTP.Protocol
@@ -50,7 +49,6 @@ import Hasura.Server.Version (HasVersion)
 import Hasura.Session

-
 type QueryParts = G.TypedOperationDefinition G.FragmentSpread G.Name

 -- | Execution context
|
|||||||
qdbs <- traverse (checkField @b sourceName) allFields
|
qdbs <- traverse (checkField @b sourceName) allFields
|
||||||
lqp <- case backendTag @b of
|
lqp <- case backendTag @b of
|
||||||
PostgresTag -> LQP <$> EB.mkDBSubscriptionPlan userInfo sourceConfig qdbs
|
PostgresTag -> LQP <$> EB.mkDBSubscriptionPlan userInfo sourceConfig qdbs
|
||||||
|
MSSQLTag -> LQP <$> EB.mkDBSubscriptionPlan userInfo sourceConfig qdbs
|
||||||
pure (sourceName, lqp)
|
pure (sourceName, lqp)
|
||||||
checkField
|
checkField
|
||||||
:: forall b. Backend b
|
:: forall b. Backend b
|
||||||
|
@@ -14,7 +14,6 @@ import qualified Network.HTTP.Types as HTTP

 import qualified Hasura.Backends.Postgres.SQL.DML as S
 import qualified Hasura.Backends.Postgres.Translate.Select as DS
-import qualified Hasura.RQL.IR.Select as DS
 import qualified Hasura.Tracing as Tracing

 import Hasura.Backends.Postgres.Connection
@@ -30,19 +29,6 @@ import Hasura.Server.Version (HasVersion)
 import Hasura.Session


-traverseQueryDB
-  :: forall f a b backend
-   . Applicative f
-  => (a -> f b)
-  -> QueryDB backend a
-  -> f (QueryDB backend b)
-traverseQueryDB f = \case
-  QDBMultipleRows s -> QDBMultipleRows <$> DS.traverseAnnSimpleSelect f s
-  QDBSingleRow    s -> QDBSingleRow    <$> DS.traverseAnnSimpleSelect f s
-  QDBAggregation  s -> QDBAggregation  <$> DS.traverseAnnAggregateSelect f s
-  QDBConnection   s -> QDBConnection   <$> DS.traverseConnectionSelect f s
-
-
 data PreparedSql
   = PreparedSql
     { _psQuery :: !Q.Query
@@ -233,10 +233,13 @@ instance Q.ToPrepArg CohortVariablesArray where
 -- so if any variable values are invalid, the error will be caught early.

 newtype ValidatedVariables f = ValidatedVariables (f TxtEncodedPGVal)

 deriving instance (Show     (f TxtEncodedPGVal)) => Show     (ValidatedVariables f)
 deriving instance (Eq       (f TxtEncodedPGVal)) => Eq       (ValidatedVariables f)
 deriving instance (Hashable (f TxtEncodedPGVal)) => Hashable (ValidatedVariables f)
 deriving instance (J.ToJSON (f TxtEncodedPGVal)) => J.ToJSON (ValidatedVariables f)
+deriving instance (Semigroup (f TxtEncodedPGVal)) => Semigroup (ValidatedVariables f)
+deriving instance (Monoid    (f TxtEncodedPGVal)) => Monoid    (ValidatedVariables f)

 type ValidatedQueryVariables     = ValidatedVariables (Map.HashMap G.Name)
 type ValidatedSyntheticVariables = ValidatedVariables []
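The two new deriving clauses make validated variables combinable; for the
HashMap instantiation used by ValidatedQueryVariables this is a left-biased
pointwise union (a sketch):

-- With f ~ Map.HashMap G.Name:
--   ValidatedVariables m1 <> ValidatedVariables m2
--     == ValidatedVariables (m1 <> m2)   -- left-biased HashMap union
--   mempty == ValidatedVariables mempty  -- the empty variable map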
@ -4,30 +4,33 @@ module Hasura.GraphQL.Execute.Mutation
|
|||||||
|
|
||||||
import Hasura.Prelude
|
import Hasura.Prelude
|
||||||
|
|
||||||
import qualified Data.Environment as Env
|
import qualified Data.Environment as Env
|
||||||
import qualified Data.HashMap.Strict as Map
|
import qualified Data.HashMap.Strict as Map
|
||||||
import qualified Data.HashMap.Strict.InsOrd as OMap
|
import qualified Data.HashMap.Strict.InsOrd as OMap
|
||||||
import qualified Data.Sequence.NonEmpty as NE
|
import qualified Data.Sequence.NonEmpty as NE
|
||||||
import qualified Language.GraphQL.Draft.Syntax as G
|
import qualified Language.GraphQL.Draft.Syntax as G
|
||||||
import qualified Network.HTTP.Client as HTTP
|
import qualified Network.HTTP.Client as HTTP
|
||||||
import qualified Network.HTTP.Types as HTTP
|
import qualified Network.HTTP.Types as HTTP
|
||||||
|
|
||||||
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
|
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
|
||||||
import qualified Hasura.Logging as L
|
import qualified Hasura.Logging as L
|
||||||
import qualified Hasura.Tracing as Tracing
|
import qualified Hasura.Tracing as Tracing
|
||||||
|
|
||||||
import Hasura.GraphQL.Context
|
import Hasura.GraphQL.Context
|
||||||
import Hasura.GraphQL.Execute.Action
|
import Hasura.GraphQL.Execute.Action
|
||||||
import Hasura.GraphQL.Execute.Backend
|
import Hasura.GraphQL.Execute.Backend
|
||||||
import Hasura.GraphQL.Execute.Postgres ()
|
|
||||||
import Hasura.GraphQL.Execute.Remote
|
import Hasura.GraphQL.Execute.Remote
|
||||||
import Hasura.GraphQL.Execute.Resolve
|
import Hasura.GraphQL.Execute.Resolve
|
||||||
import Hasura.GraphQL.Parser
|
import Hasura.GraphQL.Parser
|
||||||
import Hasura.Metadata.Class
|
import Hasura.Metadata.Class
|
||||||
import Hasura.RQL.Types
|
import Hasura.RQL.Types
|
||||||
import Hasura.Server.Version (HasVersion)
|
import Hasura.Server.Version (HasVersion)
|
||||||
import Hasura.Session
|
import Hasura.Session
|
||||||
|
|
||||||
|
-- backend instances
|
||||||
|
import Hasura.Backends.MSSQL.Instances.Execute ()
|
||||||
|
import Hasura.Backends.Postgres.Instances.Execute ()
|
||||||
|
|
||||||
|
|
||||||
convertMutationAction
|
convertMutationAction
|
||||||
::( HasVersion
|
::( HasVersion
|
||||||
@@ -86,6 +89,7 @@ convertMutationSelectionSet env logger gqlContext SQLGenCtx{stringifyNum} userIn
   txs <- for unpreparedQueries \case
     RFDB _ (sourceConfig :: SourceConfig b) (MDBR db) -> case backendTag @b of
       PostgresTag -> mkDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig db
+      MSSQLTag    -> mkDBMutationPlan env manager reqHeaders userInfo stringifyNum sourceConfig db
     RFRemote remoteField -> do
       RemoteFieldG remoteSchemaInfo resolvedRemoteField <- resolveRemoteField userInfo remoteField
       pure $ buildExecStepRemote remoteSchemaInfo G.OperationTypeMutation $ [G.SelectionField resolvedRemoteField]
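
Note: both branches above call the same class method; the `case backendTag @b of` match is what recovers the concrete backend for the existentially quantified `b`, letting instance resolution pick the right implementation. A self-contained sketch of this tag-dispatch pattern (all names invented for illustration, not the engine's actual types):

    {-# LANGUAGE DataKinds, GADTs, KindSignatures, ScopedTypeVariables, TypeApplications #-}

    import Data.Proxy (Proxy (..))

    data BackendType = Postgres | MSSQL

    -- A singleton tag tying a runtime witness to a type-level backend.
    data BackendTag (b :: BackendType) where
      PostgresTag :: BackendTag 'Postgres
      MSSQLTag    :: BackendTag 'MSSQL

    class HasTag (b :: BackendType) where backendTag :: BackendTag b
    instance HasTag 'Postgres where backendTag = PostgresTag
    instance HasTag 'MSSQL    where backendTag = MSSQLTag

    -- A field whose backend is hidden, as with RFDB's SourceConfig b.
    data SomeField where
      SomeField :: HasTag b => Proxy b -> String -> SomeField

    -- Matching on the tag recovers the concrete backend, so each branch
    -- can pick backend-specific behaviour.
    plan :: SomeField -> String
    plan (SomeField (_ :: Proxy b) sql) = case backendTag @b of
      PostgresTag -> "pg plan for: " <> sql
      MSSQLTag    -> "mssql plan for: " <> sql

    main :: IO ()
    main = putStrLn (plan (SomeField (Proxy :: Proxy 'MSSQL) "SELECT 1"))
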
@@ -9,31 +9,34 @@ module Hasura.GraphQL.Execute.Query

 import Hasura.Prelude

 import qualified Data.Aeson as J
 import qualified Data.Environment as Env
 import qualified Data.HashMap.Strict as Map
 import qualified Data.HashMap.Strict.InsOrd as OMap
 import qualified Data.Sequence.NonEmpty as NESeq
 import qualified Language.GraphQL.Draft.Syntax as G
 import qualified Network.HTTP.Client as HTTP
 import qualified Network.HTTP.Types as HTTP

 import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
 import qualified Hasura.Logging as L
 import qualified Hasura.Tracing as Tracing

 import Hasura.GraphQL.Context
 import Hasura.GraphQL.Execute.Action
 import Hasura.GraphQL.Execute.Backend
 import Hasura.GraphQL.Execute.Common
-import Hasura.GraphQL.Execute.Postgres ()
 import Hasura.GraphQL.Execute.Remote
 import Hasura.GraphQL.Execute.Resolve
 import Hasura.GraphQL.Parser
 import Hasura.RQL.Types
 import Hasura.Server.Version (HasVersion)
 import Hasura.Session

+-- backend instances
+import Hasura.Backends.MSSQL.Instances.Execute ()
+import Hasura.Backends.Postgres.Instances.Execute ()


 parseGraphQLQuery
   :: MonadError QErr m
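
Note: the `()` import lists are deliberate — nothing is imported by name; these imports exist purely to bring each backend's orphan class instances into scope. A sketch of the idiom with invented module and class names:

    -- Defs.hs: the class and the type, with no instance between them.
    module Defs where

    class Pretty a where
      pretty :: a -> String

    newtype UserId = UserId Int

    -- Instances.hs: an instances-only module; it exports nothing by name.
    {-# OPTIONS_GHC -fno-warn-orphans #-}
    module Instances () where

    import Defs

    instance Pretty UserId where
      pretty (UserId n) = "user #" ++ show n

    -- Main.hs: the empty import list still pulls in the instance.
    module Main where

    import Defs
    import Instances ()

    main :: IO ()
    main = putStrLn (pretty (UserId 42))
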
@@ -83,6 +86,7 @@ convertQuerySelSet env logger gqlContext userInfo manager reqHeaders directives
   executionPlan <- for unpreparedQueries \case
     RFDB _ (sourceConfig :: SourceConfig b) (QDBR db) -> case backendTag @b of
       PostgresTag -> mkDBQueryPlan env manager reqHeaders userInfo directives sourceConfig db
+      MSSQLTag    -> mkDBQueryPlan env manager reqHeaders userInfo directives sourceConfig db
     RFRemote rf -> do
       RemoteFieldG remoteSchemaInfo remoteField <- for rf $ resolveRemoteVariable userInfo
       pure $ buildExecStepRemote remoteSchemaInfo G.OperationTypeQuery [G.SelectionField remoteField]
@@ -158,6 +158,7 @@ explainGQLQuery sc (GQLExplain query userVarsRaw maybeIsRelay) = do
     (_, E.LQP (execPlan :: EL.LiveQueryPlan b (E.MultiplexedQuery b))) <- E.createSubscriptionPlan userInfo unpreparedQueries
     case backendTag @b of
       PostgresTag -> encJFromJValue <$> E.explainLiveQueryPlan execPlan
+      MSSQLTag    -> pure mempty
   where
     queryType = bool E.QueryHasura E.QueryRelay $ Just True == maybeIsRelay
     sessionVariables = mkSessionVariablesText $ fromMaybe mempty userVarsRaw
@@ -1,88 +1,50 @@
 {-# LANGUAGE Arrows #-}
 {-# LANGUAGE ViewPatterns #-}
-{-# OPTIONS_GHC -fno-warn-orphans #-}

 module Hasura.GraphQL.Schema
   ( buildGQLContext
   ) where

 import Hasura.Prelude

 import qualified Data.Aeson as J
 import qualified Data.HashMap.Strict as Map
 import qualified Data.HashMap.Strict.InsOrd as OMap
 import qualified Data.HashSet as Set
 import qualified Language.GraphQL.Draft.Syntax as G

 import Control.Arrow.Extended
 import Control.Lens.Extended
 import Control.Monad.Unique
 import Data.Has
 import Data.List.Extended (duplicates)

-import qualified Hasura.Backends.Postgres.SQL.DML as PG
-import qualified Hasura.Backends.Postgres.SQL.Types as PG
-import qualified Hasura.GraphQL.Parser as P
-import qualified Hasura.GraphQL.Schema.Build as GSB
-import qualified Hasura.GraphQL.Schema.Postgres as PGS
+import qualified Hasura.GraphQL.Parser as P

 import Data.Text.Extended
 import Hasura.GraphQL.Context
 import Hasura.GraphQL.Execute.Types
 import Hasura.GraphQL.Parser (Kind (..), Parser, Schema (..),
                               UnpreparedValue (..))
 import Hasura.GraphQL.Parser.Class
 import Hasura.GraphQL.Parser.Internal.Parser (FieldParser (..))
 import Hasura.GraphQL.Schema.Backend
 import Hasura.GraphQL.Schema.Common
 import Hasura.GraphQL.Schema.Introspect
+import Hasura.GraphQL.Schema.Postgres
 import Hasura.GraphQL.Schema.Remote (buildRemoteParser)
 import Hasura.GraphQL.Schema.Select
 import Hasura.GraphQL.Schema.Table
 import Hasura.RQL.DDL.Schema.Cache.Common
 import Hasura.RQL.Types
 import Hasura.Session

--- Mapping from backend to schema.
--- Those instances are orphan by design: generic parsers must be written with the knowledge of the
--- BackendSchema typeclass, and the backend-specific parsers that we specify here do in turn rely on
--- those generic parsers. To avoid a include loop, we split the definition of the typeclass and of
--- its instance.
--- This should probably moved in a PG-specific section of the code (Backend/Postgres/Schema,
--- perhaps?) to avoid the proliferation of such instances as we add more backends.
-
-instance BackendSchema 'Postgres where
-  -- top level parsers
-  buildTableQueryFields = GSB.buildTableQueryFields
-  buildTableRelayQueryFields = PGS.buildTableRelayQueryFields
-  buildTableInsertMutationFields = GSB.buildTableInsertMutationFields
-  buildTableUpdateMutationFields = GSB.buildTableUpdateMutationFields
-  buildTableDeleteMutationFields = GSB.buildTableDeleteMutationFields
-  buildFunctionQueryFields = GSB.buildFunctionQueryFields
-  buildFunctionRelayQueryFields = PGS.buildFunctionRelayQueryFields
-  buildFunctionMutationFields = GSB.buildFunctionMutationFields
-  -- backend extensions
-  relayExtension = const $ Just ()
-  nodesAggExtension = const $ Just ()
-  -- indivdual components
-  columnParser = PGS.columnParser
-  jsonPathArg = PGS.jsonPathArg
-  orderByOperators = PGS.orderByOperators
-  comparisonExps = PGS.comparisonExps
-  updateOperators = PGS.updateOperators
-  offsetParser = PGS.offsetParser
-  mkCountType = PGS.mkCountType
-  aggregateOrderByCountType = PG.PGInteger
-  computedField = computedFieldPG
-  node = nodePG
-  tableDistinctOn = PGS.tableDistinctOn
-  remoteRelationshipField = remoteRelationshipFieldPG
-  -- SQL literals
-  columnDefaultValue = const PG.columnDefaultValue
-
--- | Whether the request is sent with `x-hasura-use-backend-only-permissions` set to `true`.
-data Scenario = Backend | Frontend deriving (Enum, Show, Eq)
-
-type RemoteSchemaCache = HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject)
+----------------------------------------------------------------
+-- Backends schema instances
+
+import Hasura.Backends.MSSQL.Instances.Schema ()
+import Hasura.Backends.Postgres.Instances.Schema ()

 ----------------------------------------------------------------
 -- Building contexts
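
Note on the hunk above: the `instance BackendSchema 'Postgres` method table moves out of this module and into `Hasura.Backends.Postgres.Instances.Schema`, imported for its instances only. Such a class acts as a per-backend method table that generic schema code programs against; a compressed sketch of the shape, with invented method names (the `Maybe ()` capability flag mirrors `relayExtension = const $ Just ()`):

    {-# LANGUAGE DataKinds      #-}
    {-# LANGUAGE KindSignatures #-}

    import Data.Proxy (Proxy (..))

    data BackendType = Postgres | MSSQL

    -- Generic schema code calls these methods, never a concrete backend.
    class BackendSchemaSketch (b :: BackendType) where
      offsetSyntax :: Proxy b -> String
      relaySupport :: Proxy b -> Maybe ()  -- Just () = capability present

    instance BackendSchemaSketch 'Postgres where
      offsetSyntax _ = "OFFSET $n"
      relaySupport _ = Just ()

    instance BackendSchemaSketch 'MSSQL where
      offsetSyntax _ = "OFFSET n ROWS FETCH NEXT m ROWS ONLY"
      relaySupport _ = Nothing
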
@@ -453,7 +415,8 @@ remoteSchemaFields = proc (queryFieldNames, mutationFieldNames, allRemoteSchemas
     ) |) [] (Map.toList allRemoteSchemas)

 buildQueryFields
-  :: forall b r m n. (BackendSchema b, MonadBuildSchema b r m n)
+  :: forall b r m n
+   . MonadBuildSchema b r m n
   => SourceName
   -> SourceConfig b
   -> TableCache b
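
Note: dropping the explicit `BackendSchema b` from these signatures is sound when `MonadBuildSchema` carries it as a superclass, so every use site gets the constraint for free. A minimal sketch of that refactoring (class bodies invented for illustration, not the engine's):

    {-# LANGUAGE MultiParamTypeClasses #-}

    class BackendSchema b where
      backendName :: b -> String

    -- Before: call sites needed (BackendSchema b, MonadBuildSchema b m).
    -- With BackendSchema as a superclass, MonadBuildSchema b m alone implies it.
    class (BackendSchema b, Monad m) => MonadBuildSchema b m

    describeIn :: MonadBuildSchema b m => b -> m String
    describeIn b = pure (backendName b)  -- BackendSchema obtained via the superclass
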
@@ -478,7 +441,8 @@ buildQueryFields sourceName sourceConfig tables (takeExposedAs FEAQuery -> funct
   pure $ concat $ catMaybes $ tableSelectExpParsers <> functionSelectExpParsers

 buildRelayQueryFields
-  :: forall b r m n. (MonadBuildSchema b r m n)
+  :: forall b r m n
+   . MonadBuildSchema b r m n
   => SourceName
   -> SourceConfig b
   -> TableCache b
@@ -500,7 +464,8 @@ buildRelayQueryFields sourceName sourceConfig tables (takeExposedAs FEAQuery ->
   pure $ catMaybes $ tableConnectionFields <> functionConnectionFields

 buildMutationFields
-  :: forall b r m n. (BackendSchema b, MonadBuildSchema b r m n)
+  :: forall b r m n
+   . MonadBuildSchema b r m n
   => Scenario
   -> SourceName
   -> SourceConfig b
@@ -568,7 +533,7 @@ buildQueryParser
   -> Parser 'Output n (OMap.InsOrdHashMap G.Name (QueryRootField UnpreparedValue))
   -> m (Parser 'Output n (OMap.InsOrdHashMap G.Name (QueryRootField UnpreparedValue)))
 buildQueryParser pgQueryFields remoteFields allActions nonObjectCustomTypes mutationParser subscriptionParser = do
-  actionQueryFields <- concat <$> traverse (PGS.buildActionQueryFields nonObjectCustomTypes) allActions
+  actionQueryFields <- concat <$> traverse (buildActionQueryFields nonObjectCustomTypes) allActions
   let allQueryFields = pgQueryFields <> actionQueryFields <> map (fmap RFRemote) remoteFields
   queryWithIntrospectionHelper allQueryFields mutationParser subscriptionParser

@@ -655,7 +620,7 @@ buildSubscriptionParser
   -> [ActionInfo]
   -> m (Parser 'Output n (OMap.InsOrdHashMap G.Name (QueryRootField UnpreparedValue)))
 buildSubscriptionParser queryFields allActions = do
-  actionSubscriptionFields <- concat <$> traverse PGS.buildActionSubscriptionFields allActions
+  actionSubscriptionFields <- concat <$> traverse buildActionSubscriptionFields allActions
   let subscriptionFields = queryFields <> actionSubscriptionFields
   P.safeSelectionSet subscriptionRoot Nothing subscriptionFields
     <&> fmap (fmap (P.handleTypename (RFRaw . J.String . G.unName)))
@@ -674,7 +639,7 @@ buildMutationParser
   -> [P.FieldParser n (MutationRootField UnpreparedValue)]
   -> m (Maybe (Parser 'Output n (OMap.InsOrdHashMap G.Name (MutationRootField UnpreparedValue))))
 buildMutationParser allRemotes allActions nonObjectCustomTypes mutationFields = do
-  actionParsers <- concat <$> traverse (PGS.buildActionMutationFields nonObjectCustomTypes) allActions
+  actionParsers <- concat <$> traverse (buildActionMutationFields nonObjectCustomTypes) allActions
   let mutationFieldsParser =
         mutationFields <>
         actionParsers <>
@@ -726,3 +691,9 @@ runMonadSchema roleName queryContext pgSources extensions m =
 withBackendSchema :: (forall b. BackendSchema b => SourceInfo b -> r) -> BackendSourceInfo -> r
 withBackendSchema f (BackendSourceInfo (bsi :: SourceInfo b)) = case backendTag @b of
   PostgresTag -> f bsi
+  MSSQLTag    -> f bsi
+
+-- | Whether the request is sent with `x-hasura-use-backend-only-permissions` set to `true`.
+data Scenario = Backend | Frontend deriving (Enum, Show, Eq)
+
+type RemoteSchemaCache = HashMap RemoteSchemaName (RemoteSchemaCtx, MetadataObject)
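
Note: `withBackendSchema` is the usual way to consume an existential: the rank-2 function must handle every backend uniformly, and the tag match is where the per-backend instance is brought into scope. A runnable sketch of the shape (types invented for the example):

    {-# LANGUAGE ExistentialQuantification #-}
    {-# LANGUAGE RankNTypes                #-}

    class Backend b where
      backendName :: b -> String

    data Pg = Pg
    instance Backend Pg where backendName _ = "postgres"

    data Ms = Ms
    instance Backend Ms where backendName _ = "mssql"

    -- The existential wrapper: which backend is inside is hidden.
    data SomeBackend = forall b. Backend b => SomeBackend b

    -- A rank-2 consumer: the supplied function must work for any backend,
    -- so unpacking the existential is safe.
    withBackend :: (forall b. Backend b => b -> r) -> SomeBackend -> r
    withBackend f (SomeBackend b) = f b

    main :: IO ()
    main = mapM_ (putStrLn . withBackend (\b -> backendName b))
                 [SomeBackend Pg, SomeBackend Ms]
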
@@ -1,96 +1,21 @@
--- | Postgres-specific schema combinators
+-- | Postgres-specific schema combinators. Those should be moved to
+-- the corresponding instance of `BackendSchema`, when actions are
+-- generalized.
 module Hasura.GraphQL.Schema.Postgres
-  ( buildTableRelayQueryFields
-  , buildFunctionRelayQueryFields
-  , columnParser
-  , jsonPathArg
-  , orderByOperators
-  , comparisonExps
-  , offsetParser
-  , mkCountType
-  , tableDistinctOn
-  , updateOperators
-
-  , buildActionQueryFields
+  ( buildActionQueryFields
   , buildActionSubscriptionFields
   , buildActionMutationFields
   ) where

 import Hasura.Prelude

-import qualified Data.Aeson as J
-import qualified Data.HashMap.Strict as Map
-import qualified Data.HashMap.Strict.Extended as M
-import qualified Data.HashMap.Strict.InsOrd.Extended as OMap
-import qualified Data.List.NonEmpty as NE
-import qualified Data.Text as T
-import qualified Database.PG.Query as Q
-import qualified Language.GraphQL.Draft.Syntax as G
-
-import Data.Parser.JSONPath
-import Data.Text.Extended
-
-import qualified Hasura.GraphQL.Parser as P
-import qualified Hasura.RQL.IR.Select as IR
-import qualified Hasura.RQL.IR.Update as IR
-
-import Hasura.Backends.Postgres.SQL.DML as PG hiding (CountType)
-import Hasura.Backends.Postgres.SQL.Types as PG hiding (FunctionName, TableName)
-import Hasura.Backends.Postgres.SQL.Value as PG
 import Hasura.GraphQL.Context
 import Hasura.GraphQL.Parser hiding (EnumValueInfo, field)
-import Hasura.GraphQL.Parser.Internal.Parser hiding (field)
 import Hasura.GraphQL.Schema.Action
-import Hasura.GraphQL.Schema.Backend (BackendSchema, ComparisonExp,
-                                      MonadBuildSchema)
-import Hasura.GraphQL.Schema.Common
-import Hasura.GraphQL.Schema.Select
-import Hasura.GraphQL.Schema.Table
+import Hasura.GraphQL.Schema.Backend (MonadBuildSchema)
 import Hasura.RQL.Types
-import Hasura.SQL.Types
-
-
-----------------------------------------------------------------
--- Top level parsers
-
-buildTableRelayQueryFields
-  :: MonadBuildSchema 'Postgres r m n
-  => SourceName
-  -> SourceConfig 'Postgres
-  -> TableName 'Postgres
-  -> TableInfo 'Postgres
-  -> G.Name
-  -> NESeq (ColumnInfo 'Postgres)
-  -> SelPermInfo 'Postgres
-  -> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
-buildTableRelayQueryFields sourceName sourceInfo tableName tableInfo gqlName pkeyColumns selPerms = do
-  let
-    mkRF = RFDB sourceName sourceInfo . QDBR
-    fieldName = gqlName <> $$(G.litName "_connection")
-    fieldDesc = Just $ G.Description $ "fetch data from the table: " <>> tableName
-  optionalFieldParser (mkRF . QDBConnection) $ selectTableConnection tableName fieldName fieldDesc pkeyColumns selPerms
-
-buildFunctionRelayQueryFields
-  :: MonadBuildSchema 'Postgres r m n
-  => SourceName
-  -> SourceConfig 'Postgres
-  -> FunctionName 'Postgres
-  -> FunctionInfo 'Postgres
-  -> TableName 'Postgres
-  -> NESeq (ColumnInfo 'Postgres)
-  -> SelPermInfo 'Postgres
-  -> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
-buildFunctionRelayQueryFields sourceName sourceInfo functionName functionInfo tableName pkeyColumns selPerms = do
-  funcName <- functionGraphQLName @'Postgres functionName `onLeft` throwError
-  let
-    mkRF = RFDB sourceName sourceInfo . QDBR
-    fieldName = funcName <> $$(G.litName "_connection")
-    fieldDesc = Just $ G.Description $ "execute function " <> functionName <<> " which returns " <>> tableName
-  optionalFieldParser (mkRF . QDBConnection) $ selectFunctionConnection functionInfo fieldName fieldDesc pkeyColumns selPerms
-
-----------------------------------------------------------
--- Action related fields
-
 buildActionQueryFields
   :: MonadBuildSchema 'Postgres r m n
   => NonObjectTypeMap
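
Note on the `$$(G.litName ...)` splices used throughout this file: they are Template Haskell splices that validate a GraphQL name at compile time, so a typo in a field name fails the build rather than surfacing at runtime. A simplified sketch of such a checked literal (an untyped-splice variant, not graphql-parser-hs's actual `litName`):

    {-# LANGUAGE DeriveLift      #-}
    {-# LANGUAGE TemplateHaskell #-}
    module LitName (Name (..), litName) where

    import Data.Char (isAlpha, isAlphaNum)
    import Language.Haskell.TH (Exp, Q)
    import Language.Haskell.TH.Syntax (Lift, lift)

    newtype Name = Name String deriving (Show, Lift)

    -- Validate at compile time; an invalid literal aborts compilation.
    litName :: String -> Q Exp
    litName s
      | valid s   = lift (Name s)
      | otherwise = fail ("invalid GraphQL name: " ++ s)
      where
        valid (c:cs) = (isAlpha c || c == '_') && all (\x -> isAlphaNum x || x == '_') cs
        valid []     = False

    -- Usage from another module: myField = $(litName "_connection")
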
@@ -127,454 +52,3 @@ buildActionSubscriptionFields actionInfo =
     ActionMutation ActionSynchronous -> pure Nothing
     ActionMutation ActionAsynchronous ->
       fmap (fmap (RFAction . AQAsync)) <$> actionAsyncQuery actionInfo
-
-
-----------------------------------------------------------------
--- Individual components
-
-columnParser
-  :: (MonadSchema n m, MonadError QErr m)
-  => ColumnType 'Postgres
-  -> G.Nullability
-  -> m (Parser 'Both n (Opaque (ColumnValue 'Postgres)))
-columnParser columnType (G.Nullability isNullable) =
-  -- TODO(PDV): It might be worth memoizing this function even though it isn’t
-  -- recursive simply for performance reasons, since it’s likely to be hammered
-  -- during schema generation. Need to profile to see whether or not it’s a win.
-  opaque . fmap (ColumnValue columnType) <$> case columnType of
-    ColumnScalar scalarType -> possiblyNullable scalarType <$> case scalarType of
-      PGInteger -> pure (PGValInteger <$> P.int)
-      PGBoolean -> pure (PGValBoolean <$> P.boolean)
-      PGFloat -> pure (PGValDouble <$> P.float)
-      PGText -> pure (PGValText <$> P.string)
-      PGVarchar -> pure (PGValVarchar <$> P.string)
-      PGJSON -> pure (PGValJSON . Q.JSON <$> P.json)
-      PGJSONB -> pure (PGValJSONB . Q.JSONB <$> P.jsonb)
-      -- For all other scalars, we convert the value to JSON and use the
-      -- FromJSON instance. The major upside is that this avoids having to write
-      -- new parsers for each custom type: if the JSON parser is sound, so will
-      -- this one, and it avoids the risk of having two separate ways of parsing
-      -- a value in the codebase, which could lead to inconsistencies.
-      _ -> do
-        name <- mkScalarTypeName scalarType
-        let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition name Nothing P.TIScalar
-        pure $ Parser
-          { pType = schemaType
-          , pParser =
-              valueToJSON (P.toGraphQLType schemaType) >=>
-              either (parseErrorWith ParseFailed . qeError) pure . runAesonParser (parsePGValue scalarType)
-          }
-    ColumnEnumReference (EnumReference tableName enumValues) ->
-      case nonEmpty (Map.toList enumValues) of
-        Just enumValuesList -> do
-          name <- qualifiedObjectToName tableName <&> (<> $$(G.litName "_enum"))
-          pure $ possiblyNullable PGText $ P.enum name Nothing (mkEnumValue <$> enumValuesList)
-        Nothing -> throw400 ValidationFailed "empty enum values"
-  where
-    -- Sadly, this combinator is not sound in general, so we can’t export it
-    -- for general-purpose use. If we did, someone could write this:
-    --
-    --   mkParameter <$> opaque do
-    --     n <- int
-    --     pure (mkIntColumnValue (n + 1))
-    --
-    -- Now we’d end up with a UVParameter that has a variable in it, so we’d
-    -- parameterize over it. But when we’d reuse the plan, we wouldn’t know to
-    -- increment the value by 1, so we’d use the wrong value!
-    --
-    -- We could theoretically solve this by retaining a reference to the parser
-    -- itself and re-parsing each new value, using the saved parser, which
-    -- would admittedly be neat. But it’s more complicated, and it isn’t clear
-    -- that it would actually be useful, so for now we don’t support it.
-    opaque :: MonadParse m => Parser 'Both m a -> Parser 'Both m (Opaque a)
-    opaque parser = parser
-      { pParser = \case
-          P.GraphQLValue (G.VVariable var@Variable{ vInfo, vValue }) -> do
-            typeCheck False (P.toGraphQLType $ pType parser) var
-            P.mkOpaque (Just vInfo) <$> pParser parser (absurd <$> vValue)
-          value -> P.mkOpaque Nothing <$> pParser parser value
-      }
-    possiblyNullable scalarType
-      | isNullable = fmap (fromMaybe $ PGNull scalarType) . P.nullable
-      | otherwise = id
-    mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, PGScalarValue)
-    mkEnumValue (EnumValue value, EnumValueInfo description) =
-      ( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo
-      , PGValText $ G.unName value
-      )
-
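
Note: the JSON fallback in the removed `columnParser` (`valueToJSON ... >=> runAesonParser (parsePGValue scalarType)`) routes every exotic scalar through one code path: accept the value as JSON, then reuse the existing aeson-style parser. A toy version of that shape, with an invented `Uuid` scalar:

    import           Data.Aeson (Value)
    import qualified Data.Aeson.Types as J
    import qualified Data.Text as T

    newtype Uuid = Uuid String deriving (Show)

    -- The per-type knowledge lives in one aeson-style parser...
    parseUuid :: Value -> J.Parser Uuid
    parseUuid = J.withText "uuid" (pure . Uuid . T.unpack)

    -- ...and the GraphQL layer reuses it wholesale: if the JSON parser is
    -- sound, so is this one, and there is only a single way to parse a value.
    fromGraphQLJSON :: Value -> Either String Uuid
    fromGraphQLJSON = J.parseEither parseUuid

    main :: IO ()
    main = print (fromGraphQLJSON (J.String (T.pack "123e4567")))
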
-jsonPathArg
-  :: MonadParse n
-  => ColumnType 'Postgres
-  -> InputFieldsParser n (Maybe (IR.ColumnOp 'Postgres))
-jsonPathArg columnType
-  | isScalarColumnWhere PG.isJSONType columnType =
-      P.fieldOptional fieldName description P.string `P.bindFields` fmap join . traverse toColExp
-  | otherwise = pure Nothing
-  where
-    fieldName = $$(G.litName "path")
-    description = Just "JSON select path"
-    toColExp textValue = case parseJSONPath textValue of
-      Left err -> parseError $ T.pack $ "parse json path error: " ++ err
-      Right [] -> pure Nothing
-      Right jPaths -> pure $ Just $ IR.ColumnOp PG.jsonbPathOp $ PG.SEArray $ map elToColExp jPaths
-    elToColExp (Key k) = PG.SELit k
-    elToColExp (Index i) = PG.SELit $ tshow i
-
-orderByOperators
-  :: NonEmpty (Definition P.EnumValueInfo, (BasicOrderType 'Postgres, NullsOrderType 'Postgres))
-orderByOperators = NE.fromList
-  [ ( define $$(G.litName "asc") "in ascending order, nulls last"
-    , (PG.OTAsc, PG.NLast)
-    )
-  , ( define $$(G.litName "asc_nulls_first") "in ascending order, nulls first"
-    , (PG.OTAsc, PG.NFirst)
-    )
-  , ( define $$(G.litName "asc_nulls_last") "in ascending order, nulls last"
-    , (PG.OTAsc, PG.NLast)
-    )
-  , ( define $$(G.litName "desc") "in descending order, nulls first"
-    , (PG.OTDesc, PG.NFirst)
-    )
-  , ( define $$(G.litName "desc_nulls_first") "in descending order, nulls first"
-    , (PG.OTDesc, PG.NFirst)
-    )
-  , ( define $$(G.litName "desc_nulls_last") "in descending order, nulls last"
-    , (PG.OTDesc, PG.NLast)
-    )
-  ]
-  where
-    define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo
-
-comparisonExps
-  :: forall m n. (BackendSchema 'Postgres, MonadSchema n m, MonadError QErr m)
-  => ColumnType 'Postgres -> m (Parser 'Input n [ComparisonExp 'Postgres])
-comparisonExps = P.memoize 'comparisonExps \columnType -> do
-  geogInputParser <- geographyWithinDistanceInput
-  geomInputParser <- geometryWithinDistanceInput
-  ignInputParser <- intersectsGeomNbandInput
-  ingInputParser <- intersectsNbandGeomInput
-  -- see Note [Columns in comparison expression are never nullable]
-  typedParser <- columnParser columnType (G.Nullability False)
-  nullableTextParser <- columnParser (ColumnScalar PGText) (G.Nullability True)
-  textParser <- columnParser (ColumnScalar PGText) (G.Nullability False)
-  maybeCastParser <- castExp columnType
-  let name = P.getName typedParser <> $$(G.litName "_comparison_exp")
-      desc = G.Description $ "Boolean expression to compare columns of type "
-        <> P.getName typedParser
-        <<> ". All fields are combined with logical 'AND'."
-      textListParser = P.list textParser `P.bind` traverse P.openOpaque
-      columnListParser = P.list typedParser `P.bind` traverse P.openOpaque
-  pure $ P.object name (Just desc) $ fmap catMaybes $ sequenceA $ concat
-    [ flip (maybe []) maybeCastParser $ \castParser ->
-        [ P.fieldOptional $$(G.litName "_cast") Nothing (ACast <$> castParser)
-        ]
-    -- Common ops for all types
-    , [ P.fieldOptional $$(G.litName "_is_null") Nothing (bool ANISNOTNULL ANISNULL <$> P.boolean)
-      , P.fieldOptional $$(G.litName "_eq") Nothing (AEQ True . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_neq") Nothing (ANE True . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_in") Nothing (AIN . mkListLiteral columnType <$> columnListParser)
-      , P.fieldOptional $$(G.litName "_nin") Nothing (ANIN . mkListLiteral columnType <$> columnListParser)
-      ]
-    -- Comparison ops for non Raster types
-    , guard (isScalarColumnWhere (/= PGRaster) columnType) *>
-      [ P.fieldOptional $$(G.litName "_gt") Nothing (AGT . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_lt") Nothing (ALT . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_gte") Nothing (AGTE . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_lte") Nothing (ALTE . mkParameter <$> typedParser)
-      ]
-    -- Ops for Raster types
-    , guard (isScalarColumnWhere (== PGRaster) columnType) *>
-      [ P.fieldOptional $$(G.litName "_st_intersects_rast")
-          Nothing
-          (ASTIntersectsRast . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_intersects_nband_geom")
-          Nothing
-          (ASTIntersectsNbandGeom <$> ingInputParser)
-      , P.fieldOptional $$(G.litName "_st_intersects_geom_nband")
-          Nothing
-          (ASTIntersectsGeomNband <$> ignInputParser)
-      ]
-    -- Ops for String like types
-    , guard (isScalarColumnWhere isStringType columnType) *>
-      [ P.fieldOptional $$(G.litName "_like")
-          (Just "does the column match the given pattern")
-          (ALIKE . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_nlike")
-          (Just "does the column NOT match the given pattern")
-          (ANLIKE . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_ilike")
-          (Just "does the column match the given case-insensitive pattern")
-          (AILIKE () . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_nilike")
-          (Just "does the column NOT match the given case-insensitive pattern")
-          (ANILIKE () . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_similar")
-          (Just "does the column match the given SQL regular expression")
-          (ASIMILAR . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_nsimilar")
-          (Just "does the column NOT match the given SQL regular expression")
-          (ANSIMILAR . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_regex")
-          (Just "does the column match the given POSIX regular expression, case sensitive")
-          (AREGEX . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_iregex")
-          (Just "does the column match the given POSIX regular expression, case insensitive")
-          (AIREGEX . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_nregex")
-          (Just "does the column NOT match the given POSIX regular expression, case sensitive")
-          (ANREGEX . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_niregex")
-          (Just "does the column NOT match the given POSIX regular expression, case insensitive")
-          (ANIREGEX . mkParameter <$> typedParser)
-      ]
-    -- Ops for JSONB type
-    , guard (isScalarColumnWhere (== PGJSONB) columnType) *>
-      [ P.fieldOptional $$(G.litName "_contains")
-          (Just "does the column contain the given json value at the top level")
-          (AContains . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_contained_in")
-          (Just "is the column contained in the given json value")
-          (AContainedIn . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_has_key")
-          (Just "does the string exist as a top-level key in the column")
-          (AHasKey . mkParameter <$> nullableTextParser)
-      , P.fieldOptional $$(G.litName "_has_keys_any")
-          (Just "do any of these strings exist as top-level keys in the column")
-          (AHasKeysAny . mkListLiteral (ColumnScalar PGText) <$> textListParser)
-      , P.fieldOptional $$(G.litName "_has_keys_all")
-          (Just "do all of these strings exist as top-level keys in the column")
-          (AHasKeysAll . mkListLiteral (ColumnScalar PGText) <$> textListParser)
-      ]
-    -- Ops for Geography type
-    , guard (isScalarColumnWhere (== PGGeography) columnType) *>
-      [ P.fieldOptional $$(G.litName "_st_intersects")
-          (Just "does the column spatially intersect the given geography value")
-          (ASTIntersects . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_d_within")
-          (Just "is the column within a given distance from the given geography value")
-          (ASTDWithinGeog <$> geogInputParser)
-      ]
-    -- Ops for Geometry type
-    , guard (isScalarColumnWhere (== PGGeometry) columnType) *>
-      [ P.fieldOptional $$(G.litName "_st_contains")
-          (Just "does the column contain the given geometry value")
-          (ASTContains . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_crosses")
-          (Just "does the column cross the given geometry value")
-          (ASTCrosses . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_equals")
-          (Just "is the column equal to given geometry value (directionality is ignored)")
-          (ASTEquals . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_overlaps")
-          (Just "does the column 'spatially overlap' (intersect but not completely contain) the given geometry value")
-          (ASTOverlaps . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_touches")
-          (Just "does the column have atleast one point in common with the given geometry value")
-          (ASTTouches . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_within")
-          (Just "is the column contained in the given geometry value")
-          (ASTWithin . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_intersects")
-          (Just "does the column spatially intersect the given geometry value")
-          (ASTIntersects . mkParameter <$> typedParser)
-      , P.fieldOptional $$(G.litName "_st_d_within")
-          (Just "is the column within a given distance from the given geometry value")
-          (ASTDWithinGeom <$> geomInputParser)
-      ]
-    ]
-  where
-    mkListLiteral :: ColumnType 'Postgres -> [ColumnValue 'Postgres] -> UnpreparedValue 'Postgres
-    mkListLiteral columnType columnValues = P.UVLiteral $ SETyAnn
-      (SEArray $ txtEncoder . cvValue <$> columnValues)
-      (mkTypeAnn $ CollectableTypeArray $ unsafePGColumnToBackend columnType)
-
-    castExp :: ColumnType 'Postgres -> m (Maybe (Parser 'Input n (CastExp 'Postgres (UnpreparedValue 'Postgres))))
-    castExp sourceType = do
-      let maybeScalars = case sourceType of
-            ColumnScalar PGGeography -> Just (PGGeography, PGGeometry)
-            ColumnScalar PGGeometry -> Just (PGGeometry, PGGeography)
-            _ -> Nothing
-
-      forM maybeScalars $ \(sourceScalar, targetScalar) -> do
-        sourceName <- mkScalarTypeName sourceScalar <&> (<> $$(G.litName "_cast_exp"))
-        targetName <- mkScalarTypeName targetScalar
-        targetOpExps <- comparisonExps $ ColumnScalar targetScalar
-        let field = P.fieldOptional targetName Nothing $ (targetScalar, ) <$> targetOpExps
-        pure $ P.object sourceName Nothing $ M.fromList . maybeToList <$> field
-
-geographyWithinDistanceInput
-  :: forall m n. (MonadSchema n m, MonadError QErr m)
-  => m (Parser 'Input n (DWithinGeogOp (UnpreparedValue 'Postgres)))
-geographyWithinDistanceInput = do
-  geographyParser <- columnParser (ColumnScalar PGGeography) (G.Nullability False)
-  -- FIXME
-  -- It doesn't make sense for this value to be nullable; it only is for
-  -- backwards compatibility; if an explicit Null value is given, it will be
-  -- forwarded to the underlying SQL function, that in turns treat a null value
-  -- as an error. We can fix this by rejecting explicit null values, by marking
-  -- this field non-nullable in a future release.
-  booleanParser <- columnParser (ColumnScalar PGBoolean) (G.Nullability True)
-  floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
-  pure $ P.object $$(G.litName "st_d_within_geography_input") Nothing $
-    DWithinGeogOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
-                  <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geographyParser)
-                  <*> (mkParameter <$> P.fieldWithDefault $$(G.litName "use_spheroid") Nothing (G.VBoolean True) booleanParser)
-
-geometryWithinDistanceInput
-  :: forall m n. (MonadSchema n m, MonadError QErr m)
-  => m (Parser 'Input n (DWithinGeomOp (UnpreparedValue 'Postgres)))
-geometryWithinDistanceInput = do
-  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
-  floatParser <- columnParser (ColumnScalar PGFloat) (G.Nullability False)
-  pure $ P.object $$(G.litName "st_d_within_input") Nothing $
-    DWithinGeomOp <$> (mkParameter <$> P.field $$(G.litName "distance") Nothing floatParser)
-                  <*> (mkParameter <$> P.field $$(G.litName "from") Nothing geometryParser)
-
-intersectsNbandGeomInput
-  :: forall m n. (MonadSchema n m, MonadError QErr m)
-  => m (Parser 'Input n (STIntersectsNbandGeommin (UnpreparedValue 'Postgres)))
-intersectsNbandGeomInput = do
-  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
-  integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
-  pure $ P.object $$(G.litName "st_intersects_nband_geom_input") Nothing $
-    STIntersectsNbandGeommin <$> (mkParameter <$> P.field $$(G.litName "nband") Nothing integerParser)
-                             <*> (mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
-
-intersectsGeomNbandInput
-  :: forall m n. (MonadSchema n m, MonadError QErr m)
-  => m (Parser 'Input n (STIntersectsGeomminNband (UnpreparedValue 'Postgres)))
-intersectsGeomNbandInput = do
-  geometryParser <- columnParser (ColumnScalar PGGeometry) (G.Nullability False)
-  integerParser <- columnParser (ColumnScalar PGInteger) (G.Nullability False)
-  pure $ P.object $$(G.litName "st_intersects_geom_nband_input") Nothing $ STIntersectsGeomminNband
-    <$> ( mkParameter <$> P.field $$(G.litName "geommin") Nothing geometryParser)
-    <*> (fmap mkParameter <$> P.fieldOptional $$(G.litName "nband") Nothing integerParser)
-
-offsetParser :: MonadParse n => Parser 'Both n (SQLExpression 'Postgres)
-offsetParser = PG.txtEncoder <$> Parser
-  { pType = fakeBigIntSchemaType
-  , pParser = peelVariable (Just $ P.toGraphQLType fakeBigIntSchemaType) >=> \case
-      P.GraphQLValue (G.VInt i) -> PG.PGValBigInt <$> convertWith PG.scientificToInteger (fromInteger i)
-      P.JSONValue (J.Number n) -> PG.PGValBigInt <$> convertWith PG.scientificToInteger n
-      P.GraphQLValue (G.VString s) -> pure $ PG.PGValUnknown s
-      P.JSONValue (J.String s) -> pure $ PG.PGValUnknown s
-      v -> typeMismatch $$(G.litName "Int") "a 32-bit integer, or a 64-bit integer represented as a string" v
-  }
-  where
-    fakeBigIntSchemaType = P.NonNullable $ P.TNamed $ P.mkDefinition $$(G.litName "Int") Nothing P.TIScalar
-    convertWith f = either (parseErrorWith ParseFailed . qeError) pure . runAesonParser f
-
-mkCountType :: Maybe Bool -> Maybe [Column 'Postgres] -> CountType 'Postgres
-mkCountType _ Nothing = PG.CTStar
-mkCountType (Just True) (Just cols) = PG.CTDistinct cols
-mkCountType _ (Just cols) = PG.CTSimple cols
-
--- | Argument to distinct select on columns returned from table selection
--- > distinct_on: [table_select_column!]
-tableDistinctOn
-  :: forall m n r. (BackendSchema 'Postgres, MonadSchema n m, MonadTableInfo r m, MonadRole r m)
-  => TableName 'Postgres
-  -> SelPermInfo 'Postgres
-  -> m (InputFieldsParser n (Maybe (XDistinct 'Postgres, NonEmpty (Column 'Postgres))))
-tableDistinctOn table selectPermissions = do
-  columnsEnum <- tableSelectColumnsEnum table selectPermissions
-  pure $ do
-    maybeDistinctOnColumns <- join.join <$> for columnsEnum
-      (P.fieldOptional distinctOnName distinctOnDesc . P.nullable . P.list)
-    pure $ maybeDistinctOnColumns >>= NE.nonEmpty <&> ((),)
-  where
-    distinctOnName = $$(G.litName "distinct_on")
-    distinctOnDesc = Just $ G.Description "distinct select on columns"
-
--- | Various update operators
-updateOperators
-  :: forall m n r. (MonadSchema n m, MonadTableInfo r m)
-  => QualifiedTable -- ^ qualified name of the table
-  -> UpdPermInfo 'Postgres -- ^ update permissions of the table
-  -> m (Maybe (InputFieldsParser n [(Column 'Postgres, IR.UpdOpExpG (UnpreparedValue 'Postgres))]))
-updateOperators table updatePermissions = do
-  tableGQLName <- getTableGQLName @'Postgres table
-  columns <- tableUpdateColumns table updatePermissions
-  let numericCols = onlyNumCols columns
-      jsonCols = onlyJSONBCols columns
-  parsers <- catMaybes <$> sequenceA
-    [ updateOperator tableGQLName $$(G.litName "_set")
-        typedParser IR.UpdSet columns
-        "sets the columns of the filtered rows to the given values"
-        (G.Description $ "input type for updating data in table " <>> table)
-
-    , updateOperator tableGQLName $$(G.litName "_inc")
-        typedParser IR.UpdInc numericCols
-        "increments the numeric columns with given value of the filtered values"
-        (G.Description $"input type for incrementing numeric columns in table " <>> table)
-
-    , let desc = "prepend existing jsonb value of filtered columns with new jsonb value"
-      in updateOperator tableGQLName $$(G.litName "_prepend")
-         typedParser IR.UpdPrepend jsonCols desc desc
-
-    , let desc = "append existing jsonb value of filtered columns with new jsonb value"
-      in updateOperator tableGQLName $$(G.litName "_append")
-         typedParser IR.UpdAppend jsonCols desc desc
-
-    , let desc = "delete key/value pair or string element. key/value pairs are matched based on their key value"
-      in updateOperator tableGQLName $$(G.litName "_delete_key")
-         nullableTextParser IR.UpdDeleteKey jsonCols desc desc
-
-    , let desc = "delete the array element with specified index (negative integers count from the end). "
-            <> "throws an error if top level container is not an array"
-      in updateOperator tableGQLName $$(G.litName "_delete_elem")
-         nonNullableIntParser IR.UpdDeleteElem jsonCols desc desc
-
-    , let desc = "delete the field or element with specified path (for JSON arrays, negative integers count from the end)"
-      in updateOperator tableGQLName $$(G.litName "_delete_at_path")
-         (fmap P.list . nonNullableTextParser) IR.UpdDeleteAtPath jsonCols desc desc
-    ]
-  whenMaybe (not $ null parsers) do
-    let allowedOperators = fst <$> parsers
-    pure $ fmap catMaybes (sequenceA $ snd <$> parsers)
-      `P.bindFields` \opExps -> do
-        -- there needs to be at least one operator in the update, even if it is empty
-        let presetColumns = Map.toList $ IR.UpdSet . partialSQLExpToUnpreparedValue <$> upiSet updatePermissions
-        when (null opExps && null presetColumns) $ parseError $
-          "at least any one of " <> commaSeparated allowedOperators <> " is expected"
-
-        -- no column should appear twice
-        let flattenedExps = concat opExps
-            erroneousExps = OMap.filter ((>1) . length) $ OMap.groupTuples flattenedExps
-        unless (OMap.null erroneousExps) $ parseError $
-          "column found in multiple operators; " <>
-          T.intercalate ". " [ dquote columnName <> " in " <> commaSeparated (IR.updateOperatorText <$> ops)
-                             | (columnName, ops) <- OMap.toList erroneousExps
-                             ]
-
-        pure $ presetColumns <> flattenedExps
-  where
-    typedParser columnInfo = fmap P.mkParameter <$> columnParser (pgiType columnInfo) (G.Nullability $ pgiIsNullable columnInfo)
-    nonNullableTextParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGText) (G.Nullability False)
-    nullableTextParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGText) (G.Nullability True)
-    nonNullableIntParser _ = fmap P.mkParameter <$> columnParser (ColumnScalar PGInteger) (G.Nullability False)
-
-    updateOperator
-      :: G.Name
-      -> G.Name
-      -> (ColumnInfo b -> m (Parser 'Both n a))
-      -> (a -> IR.UpdOpExpG (UnpreparedValue b))
-      -> [ColumnInfo b]
-      -> G.Description
-      -> G.Description
-      -> m (Maybe (Text, InputFieldsParser n (Maybe [(Column b, IR.UpdOpExpG (UnpreparedValue b))])))
-    updateOperator tableGQLName opName mkParser updOpExp columns opDesc objDesc =
-      whenMaybe (not $ null columns) do
-        fields <- for columns \columnInfo -> do
-          let fieldName = pgiName columnInfo
-              fieldDesc = pgiDescription columnInfo
-          fieldParser <- mkParser columnInfo
-          pure $ P.fieldOptional fieldName fieldDesc fieldParser
-            `mapField` \value -> (pgiColumn columnInfo, updOpExp value)
-        let objName = tableGQLName <> opName <> $$(G.litName "_input")
-        pure $ (G.unName opName,)
-          $ P.fieldOptional opName (Just opDesc)
-          $ P.object objName (Just objDesc)
-          $ catMaybes <$> sequenceA fields
-
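
Note: the removed `comparisonExps` assembles its operator list with `guard cond *> [...]`, using `guard` in the list `Alternative`: a failed guard collapses a whole operator group to `[]`. A tiny demonstration of the idiom:

    import Control.Monad (guard)

    -- guard False *> xs == [], guard True *> xs == xs
    opsFor :: String -> [String]
    opsFor ty = concat
      [ ["_eq", "_neq"]                                     -- every type
      , guard (ty /= "raster") *> ["_gt", "_lt"]            -- most types
      , guard (ty == "jsonb")  *> ["_contains", "_has_key"]
      ]

    main :: IO ()
    main = do
      print (opsFor "text")    -- ["_eq","_neq","_gt","_lt"]
      print (opsFor "jsonb")   -- ["_eq","_neq","_gt","_lt","_contains","_has_key"]
      print (opsFor "raster")  -- ["_eq","_neq"]
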
@@ -23,44 +23,46 @@ module Hasura.GraphQL.Transport.HTTP

 import Hasura.Prelude

 import qualified Data.Aeson as J
 import qualified Data.Aeson.Ordered as JO
 import qualified Data.ByteString.Lazy as LBS
 import qualified Data.Environment as Env
 import qualified Data.HashMap.Strict.InsOrd as OMap
 import qualified Data.Text as T
 import qualified Language.GraphQL.Draft.Syntax as G
 import qualified Network.HTTP.Types as HTTP
 import qualified Network.Wai.Extended as Wai

 import Control.Lens (toListOf)
 import Control.Monad.Morph (hoist)
 import Control.Monad.Trans.Control (MonadBaseControl)

 import qualified Hasura.GraphQL.Execute as E
 import qualified Hasura.GraphQL.Execute.Action as EA
 import qualified Hasura.GraphQL.Execute.Backend as EB
-import qualified Hasura.GraphQL.Execute.Common as EC
 import qualified Hasura.Logging as L
 import qualified Hasura.RQL.IR.RemoteJoin as IR
 import qualified Hasura.Server.Telemetry.Counters as Telem
 import qualified Hasura.Tracing as Tracing

 import Hasura.EncJSON
 import Hasura.GraphQL.Context
 import Hasura.GraphQL.Logging (MonadQueryLog (..))
 import Hasura.GraphQL.Parser.Column (UnpreparedValue (..))
 import Hasura.GraphQL.Transport.Backend
 import Hasura.GraphQL.Transport.HTTP.Protocol
-import Hasura.GraphQL.Transport.Postgres ()
 import Hasura.HTTP
 import Hasura.Metadata.Class
 import Hasura.RQL.Types
 import Hasura.Server.Init.Config
 import Hasura.Server.Types (RequestId)
 import Hasura.Server.Version (HasVersion)
 import Hasura.Session
 import Hasura.Tracing (MonadTrace, TraceT, trace)

+-- backend instances
+import Hasura.Backends.MSSQL.Instances.Transport ()
+import Hasura.Backends.Postgres.Instances.Transport ()


 data QueryCacheKey = QueryCacheKey
@@ -157,7 +159,7 @@ filterVariablesFromQuery query = fold $ rootToSessVarPreds =<< query
   where
     rootToSessVarPreds :: RootField (QueryDBRoot UnpreparedValue) c h d -> [SessVarPred]
     rootToSessVarPreds = \case
-      RFDB _ _ (QDBR db) -> toPred <$> toListOf EC.traverseQueryDB db
+      RFDB _ _ (QDBR db) -> toPred <$> toListOf traverseQueryDB db
      _ -> []

     toPred :: UnpreparedValue bet -> SessVarPred
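
Note: `filterVariablesFromQuery` works because `SessVarPred` is a `Monoid`, so `fold` combines one predicate per database field into a single filter. A sketch of such a predicate monoid under OR (the engine's actual combination may differ):

    import Data.Foldable (fold)

    newtype Pred a = Pred { runPred :: a -> Bool }

    -- mempty accepts nothing; (<>) accepts what either side accepts.
    instance Semigroup (Pred a) where
      Pred f <> Pred g = Pred (\x -> f x || g x)

    instance Monoid (Pred a) where
      mempty = Pred (const False)

    main :: IO ()
    main = do
      let wanted = fold [Pred (== "x-hasura-role"), Pred (== "x-hasura-user-id")]
      print (runPred wanted "x-hasura-role")  -- True
      print (runPred wanted "x-hasura-org")   -- False
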
@@ -213,6 +215,7 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
         E.ExecStepDB (_ :: SourceConfig b) genSql _headers _tx ->
           case backendTag @b of
             PostgresTag -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
+            MSSQLTag    -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
         _ -> []
       (responseHeaders, cachedValue) <- Tracing.interpTraceT (liftEitherM . runExceptT) $ cacheLookup remoteJoins cacheKey
       case fmap decodeGQResp cachedValue of
@@ -223,6 +226,7 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
         E.ExecStepDB (sourceConfig :: SourceConfig b) genSql _headers tx -> doQErr $ do
           (telemTimeIO_DT, resp) <- case backendTag @b of
             PostgresTag -> runDBQuery reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
+            MSSQLTag    -> runDBQuery reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
           return $ ResultsFragment telemTimeIO_DT Telem.Local resp []
         E.ExecStepRemote rsi gqlReq ->
           runRemoteGQ httpManager fieldName rsi gqlReq
@@ -240,6 +244,7 @@ runGQ env logger reqId userInfo ipAddress reqHeaders queryType reqUnparsed = do
         E.ExecStepDB (sourceConfig :: SourceConfig b) genSql responseHeaders tx -> doQErr $ do
           (telemTimeIO_DT, resp) <- case backendTag @b of
             PostgresTag -> runDBMutation reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
+            MSSQLTag    -> runDBMutation reqId reqUnparsed fieldName userInfo logger sourceConfig tx genSql
           return $ ResultsFragment telemTimeIO_DT Telem.Local resp responseHeaders
         E.ExecStepRemote rsi gqlReq ->
           runRemoteGQ httpManager fieldName rsi gqlReq
|
@@ -17,70 +17,73 @@ module Hasura.GraphQL.Transport.WebSocket

 import Hasura.Prelude

 import qualified Control.Concurrent.Async.Lifted.Safe as LA
 import qualified Control.Concurrent.STM as STM
 import qualified Control.Monad.Trans.Control as MC
 import qualified Data.Aeson as J
 import qualified Data.Aeson.Casing as J
 import qualified Data.Aeson.Ordered as JO
 import qualified Data.Aeson.TH as J
 import qualified Data.ByteString.Lazy as LBS
 import qualified Data.CaseInsensitive as CI
 import qualified Data.Environment as Env
 import qualified Data.HashMap.Strict as Map
 import qualified Data.HashMap.Strict.InsOrd as OMap
 import qualified Data.Text as T
 import qualified Data.Text.Encoding as TE
 import qualified Data.Time.Clock as TC
 import qualified Language.GraphQL.Draft.Syntax as G
 import qualified ListT
 import qualified Network.HTTP.Client as H
 import qualified Network.HTTP.Types as H
 import qualified Network.Wai.Extended as Wai
 import qualified Network.WebSockets as WS
 import qualified StmContainers.Map as STMMap

 import Control.Concurrent.Extended (sleep)
 import Control.Exception.Lifted
 import Data.String
 #ifndef PROFILING
 import GHC.AssertNF
 #endif

 import qualified Hasura.GraphQL.Execute as E
 import qualified Hasura.GraphQL.Execute.Action as EA
 import qualified Hasura.GraphQL.Execute.Backend as EB
 import qualified Hasura.GraphQL.Execute.LiveQuery.Plan as LQ
 import qualified Hasura.GraphQL.Execute.LiveQuery.Poll as LQ
 import qualified Hasura.GraphQL.Execute.LiveQuery.State as LQ
 import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
 import qualified Hasura.Logging as L
 import qualified Hasura.RQL.IR.RemoteJoin as IR
 import qualified Hasura.Server.Telemetry.Counters as Telem
 import qualified Hasura.Tracing as Tracing

 import Hasura.EncJSON
 import Hasura.GraphQL.Logging (MonadQueryLog (..))
 import Hasura.GraphQL.Transport.Backend
 import Hasura.GraphQL.Transport.HTTP (MonadExecuteQuery (..),
                                       QueryCacheKey (..),
                                       ResultsFragment (..), buildRaw,
                                       extractFieldFromResponse,
                                       filterVariablesFromQuery,
                                       runSessVarPred)
 import Hasura.GraphQL.Transport.HTTP.Protocol
-import Hasura.GraphQL.Transport.Postgres ()
 import Hasura.GraphQL.Transport.WebSocket.Protocol
 import Hasura.Metadata.Class
 import Hasura.RQL.Types
 import Hasura.Server.Auth (AuthMode, UserAuthentication,
                            resolveUserInfo)
 import Hasura.Server.Cors
 import Hasura.Server.Init.Config (KeepAliveDelay (..))
 import Hasura.Server.Types (RequestId, getRequestId)
 import Hasura.Server.Version (HasVersion)
 import Hasura.Session

+-- backend instances
+import Hasura.Backends.MSSQL.Instances.Transport ()
+import Hasura.Backends.Postgres.Instances.Transport ()


 -- | 'LQ.LiveQueryId' comes from 'Hasura.GraphQL.Execute.LiveQuery.State.addLiveQuery'. We use
 -- this to track a connection's operations so we can remove them from 'LiveQueryState', and
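The import swap above is an instance-only import: `import M ()` pulls in `M`'s typeclass instances without bringing any names into scope. A short sketch (the `BackendTransport` class name is an assumption about what those modules define):

```haskell
-- An empty import list brings no names into scope but still exposes each
-- module's typeclass instances, something like
--
--   instance BackendTransport 'Postgres where ...
--   instance BackendTransport 'MSSQL    where ...
--
-- which is all the generic WebSocket transport code needs to compile:
import Hasura.Backends.MSSQL.Instances.Transport    ()
import Hasura.Backends.Postgres.Instances.Transport ()
```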
@@ -383,6 +386,7 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
             E.ExecStepDB (_ :: SourceConfig b) genSql _headers _tx ->
               case backendTag @b of
                 PostgresTag -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
+                MSSQLTag    -> IR._rjRemoteSchema <$> maybe [] (EB.getRemoteJoins @b) genSql
             _ -> []
       -- We ignore the response headers (containing TTL information) because
       -- WebSockets don't support them.
@@ -395,6 +399,7 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
         E.ExecStepDB (sourceConfig :: SourceConfig b) genSql _headerss tx -> doQErr $ do
           (telemTimeIO_DT, resp) <- case backendTag @b of
             PostgresTag -> runDBQuery requestId q fieldName userInfo logger sourceConfig tx genSql
+            MSSQLTag    -> runDBQuery requestId q fieldName userInfo logger sourceConfig tx genSql
           return $ ResultsFragment telemTimeIO_DT Telem.Local resp []
         E.ExecStepRemote rsi gqlReq -> do
           runRemoteGQ fieldName userInfo reqHdrs rsi gqlReq
@@ -416,6 +421,7 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
         E.ExecStepDB (sourceConfig :: SourceConfig b) genSql _responseHeaders tx -> doQErr $ do
           (telemTimeIO_DT, resp) <- case backendTag @b of
             PostgresTag -> runDBMutation requestId q fieldName userInfo logger sourceConfig tx genSql
+            MSSQLTag    -> runDBMutation requestId q fieldName userInfo logger sourceConfig tx genSql
           return $ ResultsFragment telemTimeIO_DT Telem.Local resp []
         E.ExecStepAction actionExecPlan hdrs -> do
           (time, r) <- doQErr $ EA.runActionExecution actionExecPlan
@@ -438,6 +444,7 @@ onStart env serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
       -- crucial we don't lose lqId after addLiveQuery returns successfully.
       !lqId <- liftIO $ case backendTag @b of
         PostgresTag -> LQ.addLiveQuery logger subscriberMetadata lqMap sourceName liveQueryPlan liveQOnChange
+        MSSQLTag    -> LQ.addLiveQuery logger subscriberMetadata lqMap sourceName liveQueryPlan liveQOnChange
       let !opName = _grOperationName q
 #ifndef PROFILING
       liftIO $ $assertNFHere (lqId, opName)  -- so we don't write thunks to mutable vars
@@ -196,7 +196,7 @@ askTabInfoFromTrigger
   => SourceName -> TriggerName -> m (TableInfo 'Postgres)
 askTabInfoFromTrigger sourceName trn = do
   sc <- askSchemaCache
-  let tabInfos = HM.elems $ fromMaybe mempty $ unsafeTableCache sourceName $ scPostgres sc
+  let tabInfos = HM.elems $ fromMaybe mempty $ unsafeTableCache sourceName $ scSources sc
   find (isJust . HM.lookup trn . _tiEventTriggerInfoMap) tabInfos
     `onNothing` throw400 NotExists errMsg
   where
@@ -27,6 +27,7 @@ import qualified Data.List as L

 import Control.Lens ((.~), (^?))
 import Data.Aeson
+import Data.Typeable (cast)

 import Hasura.Backends.Postgres.DDL.Table (delTriggerQ)
 import Hasura.Metadata.Class
@@ -100,7 +101,8 @@ runReplaceMetadataV1 =
   (successMsg <$) . runReplaceMetadataV2 . ReplaceMetadataV2 NoAllowInconsistentMetadata

 runReplaceMetadataV2
-  :: ( QErrM m
+  :: forall m
+   . ( QErrM m
      , CacheRWM m
      , MetadataM m
      , MonadIO m
@@ -131,19 +133,29 @@ runReplaceMetadataV2 ReplaceMetadataV2{..} = do
   buildSchemaCacheStrict

   -- See Note [Clear postgres schema for dropped triggers]
-  for_ (OMap.toList $ _metaSources metadata) $ \(source, newSourceCache) ->
-    onJust (OMap.lookup source $ _metaSources oldMetadata) $ \oldSourceCache -> do
-      let getTriggersMap (BackendSourceMetadata sm) =
-            (OMap.unions . map _tmEventTriggers . OMap.elems . _smTables) sm
-          oldTriggersMap = getTriggersMap oldSourceCache
-          newTriggersMap = getTriggersMap newSourceCache
-          droppedTriggers = OMap.keys $ oldTriggersMap `OMap.difference` newTriggersMap
-      sourceConfig <- askSourceConfig source
-      for_ droppedTriggers $
-        \name -> liftIO $ runPgSourceWriteTx sourceConfig $ delTriggerQ name >> archiveEvents name
+  dropPostgresTriggers (getOnlyPGSources oldMetadata) (getOnlyPGSources metadata)

   sc <- askSchemaCache
   pure $ encJFromJValue $ formatInconsistentObjs $ scInconsistentObjs sc
+  where
+    getOnlyPGSources :: Metadata -> InsOrdHashMap SourceName (SourceMetadata 'Postgres)
+    getOnlyPGSources = OMap.mapMaybe (\(BackendSourceMetadata sm) -> cast sm) . _metaSources
+
+    dropPostgresTriggers
+      :: InsOrdHashMap SourceName (SourceMetadata 'Postgres) -- ^ old pg sources
+      -> InsOrdHashMap SourceName (SourceMetadata 'Postgres) -- ^ new pg sources
+      -> m ()
+    dropPostgresTriggers oldSources newSources =
+      for_ (OMap.toList newSources) $ \(source, newSourceCache) ->
+        onJust (OMap.lookup source oldSources) $ \oldSourceCache -> do
+          let oldTriggersMap = getPGTriggersMap oldSourceCache
+              newTriggersMap = getPGTriggersMap newSourceCache
+              droppedTriggers = OMap.keys $ oldTriggersMap `OMap.difference` newTriggersMap
+          sourceConfig <- askSourceConfig source
+          for_ droppedTriggers $
+            \name -> liftIO $ runPgSourceWriteTx sourceConfig $ delTriggerQ name >> archiveEvents name
+      where
+        getPGTriggersMap = OMap.unions . map _tmEventTriggers . OMap.elems . _smTables


 runExportMetadata
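The extracted `dropPostgresTriggers` above computes "dropped" as a key difference between the old and new trigger maps. A minimal sketch of that set arithmetic, using `Data.HashMap.Strict` where the engine uses an insertion-ordered map:

```haskell
import qualified Data.HashMap.Strict as HM

-- Triggers present in the old metadata but absent from the new one are the
-- ones whose database-side artifacts must be cleaned up.
droppedKeys :: HM.HashMap String v -> HM.HashMap String v -> [String]
droppedKeys old new = HM.keys (old `HM.difference` new)

-- >>> droppedKeys (HM.fromList [("t1", ()), ("t2", ())]) (HM.fromList [("t1", ())])
-- ["t2"]
```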
@@ -169,7 +181,7 @@ runReloadMetadata (ReloadMetadata reloadRemoteSchemas reloadSources) = do
         RSReloadAll -> HS.fromList $ getAllRemoteSchemas sc
         RSReloadList l -> l
       pgSourcesInvalidations = case reloadSources of
-        RSReloadAll -> HS.fromList $ HM.keys $ scPostgres sc
+        RSReloadAll -> HS.fromList $ HM.keys $ scSources sc
         RSReloadList l -> l
       cacheInvalidations = CacheInvalidations
         { ciMetadata = True
@@ -232,6 +244,17 @@ purgeMetadataObj = \case
           MTORemoteRelationship rn -> dropRemoteRelationshipInMetadata rn
       SMOFunction qf -> dropFunctionInMetadata source qf
       SMOFunctionPermission qf rn -> dropFunctionPermissionInMetadata source qf rn
+    MSSQLTag -> case sourceObjId of
+      SMOTable qt -> dropTableInMetadata source qt
+      SMOTableObj qt tableObj -> MetadataModifier $
+        tableMetadataSetter source qt %~ case tableObj of
+          MTORel rn _ -> dropRelationshipInMetadata rn
+          MTOPerm rn pt -> dropPermissionInMetadata rn pt
+          MTOTrigger trn -> dropEventTriggerInMetadata trn
+          MTOComputedField ccn -> dropComputedFieldInMetadata ccn
+          MTORemoteRelationship rn -> dropRemoteRelationshipInMetadata rn
+      SMOFunction qf -> dropFunctionInMetadata source qf
+      SMOFunctionPermission qf rn -> dropFunctionPermissionInMetadata source qf rn
   MORemoteSchema rsn -> dropRemoteSchemaInMetadata rsn
   MORemoteSchemaPermissions rsName role -> dropRemoteSchemaPermissionInMetadata rsName role
   MOCustomTypes -> clearCustomTypesInMetadata
@@ -41,7 +41,6 @@ import qualified Data.HashSet as HS

 import Control.Lens ((.~))
 import Data.Aeson
-import Data.Aeson.TH
 import Data.Text.Extended

 import Hasura.EncJSON
@@ -328,18 +327,20 @@ instance (BackendMetadata b) => IsPerm b (DelPerm b) where
   addPermToMetadata permDef =
     tmDeletePermissions %~ OMap.insert (_pdRole permDef) permDef

-data SetPermComment
+data SetPermComment b
   = SetPermComment
   { apSource     :: !SourceName
-  , apTable      :: !(TableName 'Postgres)
+  , apTable      :: !(TableName b)
   , apRole       :: !RoleName
   , apPermission :: !PermType
   , apComment    :: !(Maybe Text)
-  } deriving (Show, Eq)
+  } deriving (Generic)
+deriving instance (Backend b) => Show (SetPermComment b)
+deriving instance (Backend b) => Eq (SetPermComment b)
+instance (Backend b) => ToJSON (SetPermComment b) where
+  toJSON = genericToJSON hasuraJSON

-$(deriveToJSON hasuraJSON ''SetPermComment)
-
-instance FromJSON SetPermComment where
+instance (Backend b) => FromJSON (SetPermComment b) where
   parseJSON = withObject "Object" $ \o ->
     SetPermComment
       <$> o .:? "source" .!= defaultSource
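Once the record gains the backend parameter, the Template Haskell splice `deriveToJSON` used above can no longer produce a constrained instance, which is why the diff switches to `GHC.Generics`. A minimal sketch of the same move; `snakeOptions` is an illustrative stand-in for the engine's `hasuraJSON` options:

```haskell
{-# LANGUAGE DeriveGeneric #-}

import Data.Aeson (Options (..), ToJSON (..), camelTo2, defaultOptions,
                   genericToJSON)
import GHC.Generics (Generic)

-- A record parameterised by the table-name type, like SetPermComment b.
data SetComment table = SetComment
  { scTable   :: table
  , scComment :: Maybe String
  } deriving (Generic)

snakeOptions :: Options
snakeOptions = defaultOptions { fieldLabelModifier = camelTo2 '_' . drop 2 }

-- The constraint on the parameter is expressed in the instance head,
-- something a TH splice generated for a monomorphic type never needed.
instance ToJSON table => ToJSON (SetComment table) where
  toJSON = genericToJSON snakeOptions
```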
@@ -349,8 +350,8 @@ instance FromJSON SetPermComment where
       <*> o .:? "comment"

 runSetPermComment
-  :: (QErrM m, CacheRWM m, MetadataM m)
-  => SetPermComment -> m EncJSON
+  :: (QErrM m, CacheRWM m, MetadataM m, BackendMetadata b)
+  => SetPermComment b -> m EncJSON
 runSetPermComment (SetPermComment source table role permType comment) = do
   tableInfo <- askTabInfo source table
@@ -82,9 +82,6 @@ procBoolExp source tn fieldInfoMap be = do
   let deps = getBoolExpDeps source tn abe
   return (abe, deps)

-isReqUserId :: Text -> Bool
-isReqUserId = (== "req_user_id") . T.toLower
-
 getDepHeadersFromVal :: Value -> [Text]
 getDepHeadersFromVal val = case val of
   Object o -> parseObject o
@@ -26,7 +26,6 @@ import Hasura.RQL.DDL.Deps
 import Hasura.RQL.DDL.Permission
 import Hasura.RQL.Types

-
 runCreateRelationship
   :: (MonadError QErr m, CacheRWM m, ToJSON a, MetadataM m, Backend b, BackendMetadata b)
   => RelType -> WithTable b (RelDef a) -> m EncJSON
@@ -53,7 +52,9 @@ runCreateRelationship relType (WithTable source tableName relDef) = do
     $ tableMetadataSetter source tableName %~ addRelationshipToMetadata
   pure successMsg

-runDropRel :: (MonadError QErr m, CacheRWM m, MetadataM m) => DropRel -> m EncJSON
+runDropRel
+  :: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
+  => DropRel b -> m EncJSON
 runDropRel (DropRel source qt rn cascade) = do
   depObjs <- collectDependencies
   withNewInconsistentObjsCheck do
|
|||||||
<> reportSchemaObj d
|
<> reportSchemaObj d
|
||||||
|
|
||||||
runSetRelComment
|
runSetRelComment
|
||||||
:: (CacheRWM m, MonadError QErr m, MetadataM m)
|
:: (CacheRWM m, MonadError QErr m, MetadataM m, BackendMetadata b)
|
||||||
=> SetRelComment -> m EncJSON
|
=> SetRelComment b -> m EncJSON
|
||||||
runSetRelComment defn = do
|
runSetRelComment defn = do
|
||||||
tabInfo <- askTableCoreInfo source qt
|
tabInfo <- askTableCoreInfo source qt
|
||||||
relType <- riType <$> askRelType (_tciFieldInfoMap tabInfo) rn ""
|
relType <- riType <$> askRelType (_tciFieldInfoMap tabInfo) rn ""
|
||||||
|
@ -3,17 +3,16 @@ module Hasura.RQL.DDL.Relationship.Rename
|
|||||||
where
|
where
|
||||||
|
|
||||||
import Data.Text.Extended
|
import Data.Text.Extended
|
||||||
import Hasura.Backends.Postgres.SQL.Types
|
|
||||||
import Hasura.EncJSON
|
import Hasura.EncJSON
|
||||||
import Hasura.Prelude
|
import Hasura.Prelude
|
||||||
import Hasura.RQL.DDL.Schema (renameRelationshipInMetadata)
|
import Hasura.RQL.DDL.Schema (renameRelationshipInMetadata)
|
||||||
import Hasura.RQL.Types
|
import Hasura.RQL.Types
|
||||||
|
|
||||||
import qualified Data.HashMap.Strict as Map
|
import qualified Data.HashMap.Strict as Map
|
||||||
|
|
||||||
renameRelP2
|
renameRelP2
|
||||||
:: (QErrM m, CacheRM m)
|
:: (QErrM m, CacheRM m, BackendMetadata b)
|
||||||
=> SourceName -> QualifiedTable -> RelName -> RelInfo 'Postgres -> m MetadataModifier
|
=> SourceName -> TableName b -> RelName -> RelInfo b -> m MetadataModifier
|
||||||
renameRelP2 source qt newRN relInfo = withNewInconsistentObjsCheck $ do
|
renameRelP2 source qt newRN relInfo = withNewInconsistentObjsCheck $ do
|
||||||
tabInfo <- askTableCoreInfo source qt
|
tabInfo <- askTableCoreInfo source qt
|
||||||
-- check for conflicts in fieldInfoMap
|
-- check for conflicts in fieldInfoMap
|
||||||
@ -29,8 +28,8 @@ renameRelP2 source qt newRN relInfo = withNewInconsistentObjsCheck $ do
|
|||||||
oldRN = riName relInfo
|
oldRN = riName relInfo
|
||||||
|
|
||||||
runRenameRel
|
runRenameRel
|
||||||
:: (MonadError QErr m, CacheRWM m, MetadataM m)
|
:: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
|
||||||
=> RenameRel -> m EncJSON
|
=> RenameRel b -> m EncJSON
|
||||||
runRenameRel (RenameRel source qt rn newRN) = do
|
runRenameRel (RenameRel source qt rn newRN) = do
|
||||||
tabInfo <- askTableCoreInfo source qt
|
tabInfo <- askTableCoreInfo source qt
|
||||||
ri <- askRelType (_tciFieldInfoMap tabInfo) rn ""
|
ri <- askRelType (_tciFieldInfoMap tabInfo) rn ""
|
||||||
|
@@ -33,6 +33,8 @@ module Hasura.RQL.DDL.Schema
   , RunSQL(..)
   , runRunSQL
   , isSchemaCacheBuildRequiredRunSQL
+
+  , RunSQLRes(..)
   ) where

 import Hasura.Prelude
@@ -2,6 +2,8 @@
 {-# LANGUAGE OverloadedLabels     #-}
 {-# LANGUAGE UndecidableInstances #-}

+{-# OPTIONS_GHC -O0 #-}
+
 {-| Top-level functions concerned specifically with operations on the schema cache, such as
 rebuilding it from the catalog and incorporating schema changes. See the module documentation for
 "Hasura.RQL.DDL.Schema" for more details.
@@ -167,7 +169,7 @@ buildSchemaCacheRule env = proc (metadata, invalidationKeys) -> do
     throw409 $ "Ambiguous URL paths in endpoints: " <> commaSeparated (renderPath <$> ambPaths)

   returnA -< SchemaCache
-    { scPostgres = _boSources resolvedOutputs
+    { scSources = _boSources resolvedOutputs
     , scActions = _boActions resolvedOutputs
     -- TODO this is not the right value: we should track what part of the schema
     -- we can stitch without consistencies, I think.
|
|||||||
(| Inc.keyed (\_ (BackendSourceMetadata (sourceMetadata :: SourceMetadata b)) ->
|
(| Inc.keyed (\_ (BackendSourceMetadata (sourceMetadata :: SourceMetadata b)) ->
|
||||||
case backendTag @b of
|
case backendTag @b of
|
||||||
PostgresTag -> buildSourceOutput @arr @m -< (invalidationKeys, remoteSchemaCtxMap, sourceMetadata :: SourceMetadata 'Postgres)
|
PostgresTag -> buildSourceOutput @arr @m -< (invalidationKeys, remoteSchemaCtxMap, sourceMetadata :: SourceMetadata 'Postgres)
|
||||||
|
MSSQLTag -> buildSourceOutput @arr @m -< (invalidationKeys, remoteSchemaCtxMap, sourceMetadata :: SourceMetadata 'MSSQL)
|
||||||
)
|
)
|
||||||
|) (M.fromList $ OMap.toList sources)
|
|) (M.fromList $ OMap.toList sources)
|
||||||
>-> (\infos -> M.catMaybes infos >- returnA)
|
>-> (\infos -> M.catMaybes infos >- returnA)
|
||||||
|
@ -34,10 +34,10 @@ trackFunctionP1
|
|||||||
=> SourceName -> FunctionName b -> m ()
|
=> SourceName -> FunctionName b -> m ()
|
||||||
trackFunctionP1 sourceName qf = do
|
trackFunctionP1 sourceName qf = do
|
||||||
rawSchemaCache <- askSchemaCache
|
rawSchemaCache <- askSchemaCache
|
||||||
when (isJust $ unsafeFunctionInfo @b sourceName qf $ scPostgres rawSchemaCache) $
|
when (isJust $ unsafeFunctionInfo @b sourceName qf $ scSources rawSchemaCache) $
|
||||||
throw400 AlreadyTracked $ "function already tracked : " <>> qf
|
throw400 AlreadyTracked $ "function already tracked : " <>> qf
|
||||||
let qt = functionToTable qf
|
let qt = functionToTable qf
|
||||||
when (isJust $ unsafeTableInfo @b sourceName qt $ scPostgres rawSchemaCache) $
|
when (isJust $ unsafeTableInfo @b sourceName qt $ scSources rawSchemaCache) $
|
||||||
throw400 NotSupported $ "table with name " <> qf <<> " already exists"
|
throw400 NotSupported $ "table with name " <> qf <<> " already exists"
|
||||||
|
|
||||||
trackFunctionP2
|
trackFunctionP2
|
||||||
@ -100,7 +100,7 @@ askFunctionInfo
|
|||||||
. (CacheRM m, MonadError QErr m, Backend b)
|
. (CacheRM m, MonadError QErr m, Backend b)
|
||||||
=> SourceName -> FunctionName b -> m (FunctionInfo b)
|
=> SourceName -> FunctionName b -> m (FunctionInfo b)
|
||||||
askFunctionInfo source functionName = do
|
askFunctionInfo source functionName = do
|
||||||
sourceCache <- scPostgres <$> askSchemaCache
|
sourceCache <- scSources <$> askSchemaCache
|
||||||
unsafeFunctionInfo @b source functionName sourceCache
|
unsafeFunctionInfo @b source functionName sourceCache
|
||||||
`onNothing` throw400 NotExists ("function " <> functionName <<> " not found in the cache")
|
`onNothing` throw400 NotExists ("function " <> functionName <<> " not found in the cache")
|
||||||
|
|
||||||
@ -167,7 +167,7 @@ runCreateFunctionPermission
|
|||||||
=> CreateFunctionPermission b
|
=> CreateFunctionPermission b
|
||||||
-> m EncJSON
|
-> m EncJSON
|
||||||
runCreateFunctionPermission (CreateFunctionPermission functionName source role) = do
|
runCreateFunctionPermission (CreateFunctionPermission functionName source role) = do
|
||||||
sourceCache <- scPostgres <$> askSchemaCache
|
sourceCache <- scSources <$> askSchemaCache
|
||||||
functionInfo <- askFunctionInfo source functionName
|
functionInfo <- askFunctionInfo source functionName
|
||||||
when (role `elem` _fiPermissions functionInfo) $
|
when (role `elem` _fiPermissions functionInfo) $
|
||||||
throw400 AlreadyExists $
|
throw400 AlreadyExists $
|
||||||
|
@@ -31,56 +31,52 @@ mkPgSourceResolver pgLogger _ config = runExceptT do
   pure $ PGSourceConfig pgExecCtx connInfo Nothing

 --- Metadata APIs related
-runAddPgSource
-  :: (MonadError QErr m, CacheRWM m, MetadataM m)
-  => AddPgSource -> m EncJSON
-runAddPgSource (AddPgSource name sourceConfig) = do
-  let sourceConnConfig = PostgresConnConfiguration (_pccConnectionInfo sourceConfig) mempty
-  sources <- scPostgres <$> askSchemaCache
+runAddSource
+  :: forall m b
+   . (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b)
+  => AddSource b -> m EncJSON
+runAddSource (AddSource name sourceConfig) = do
+  sources <- scSources <$> askSchemaCache
   onJust (HM.lookup name sources) $ const $
     throw400 AlreadyExists $ "postgres source with name " <> name <<> " already exists"
   buildSchemaCacheFor (MOSource name)
     $ MetadataModifier
-    $ metaSources %~ OMap.insert name (mkSourceMetadata @'Postgres name sourceConnConfig)
+    $ metaSources %~ OMap.insert name (mkSourceMetadata @b name sourceConfig)
   pure successMsg

-runDropPgSource
-  :: (MonadError QErr m, CacheRWM m, MonadIO m, MonadBaseControl IO m, MetadataM m)
-  => DropPgSource -> m EncJSON
-runDropPgSource (DropPgSource name cascade) = do
-  sourceConfig <- askSourceConfig name
+runDropSource
+  :: forall m. (MonadError QErr m, CacheRWM m, MonadIO m, MonadBaseControl IO m, MetadataM m)
+  => DropSource -> m EncJSON
+runDropSource (DropSource name cascade) = do
   sc <- askSchemaCache
-  let indirectDeps = mapMaybe getIndirectDep $
-                     getDependentObjs sc (SOSource name)
-
-  when (not cascade && indirectDeps /= []) $ reportDepsExt (map (SOSourceObj name) indirectDeps) []
-
-  metadataModifier <- execWriterT $ do
-    mapM_ (purgeDependentObject name >=> tell) indirectDeps
-    tell $ MetadataModifier $ metaSources %~ OMap.delete name
-
-  buildSchemaCacheFor (MOSource name) metadataModifier
-  -- Clean traces of Hasura in source database
-  liftEitherM $ runPgSourceWriteTx sourceConfig $ do
-    hdbMetadataTableExist <- doesTableExist "hdb_catalog" "hdb_metadata"
-    eventLogTableExist <- doesTableExist "hdb_catalog" "event_log"
-    -- If "hdb_metadata" and "event_log" tables found in the "hdb_catalog" schema
-    -- then this infers the source is being used as default potgres source (--database-url option).
-    -- In this case don't drop any thing in the catalog schema.
-    if | hdbMetadataTableExist && eventLogTableExist -> pure ()
-    -- Otherwise, if only "hdb_metadata" table exist, then this infers the source is
-    -- being used as metadata storage (--metadata-database-url option). In this case
-    -- drop only source related tables and not "hdb_catalog" schema
-       | hdbMetadataTableExist ->
-         Q.multiQE defaultTxErrorHandler $(Q.sqlFromFile "src-rsr/drop_pg_source.sql")
-    -- Otherwise, drop "hdb_catalog" schema.
-       | otherwise -> dropHdbCatalogSchema
-
-  -- Destory postgres source connection
-  liftIO $ _pecDestroyConn $ _pscExecCtx sourceConfig
+  let sources = scSources sc
+  backendSourceInfo <- onNothing (HM.lookup name sources) $
+    throw400 NotExists $ "source with name " <> name <<> " does not exist"
+  dropSource' sc backendSourceInfo
   pure successMsg
   where
-    getIndirectDep :: SchemaObjId -> Maybe (SourceObjId 'Postgres)
-    getIndirectDep = \case
-      SOSourceObj s o -> if s == name then Nothing else cast o -- consider only postgres backend dependencies
-      _ -> Nothing
+    dropSource' :: SchemaCache -> BackendSourceInfo -> m ()
+    dropSource' sc (BackendSourceInfo (sourceInfo :: SourceInfo b)) =
+      case backendTag @b of
+        PostgresTag -> dropSource sc (sourceInfo :: SourceInfo 'Postgres)
+        MSSQLTag    -> dropSource sc (sourceInfo :: SourceInfo 'MSSQL)
+
+    dropSource :: forall b. (BackendMetadata b) => SchemaCache -> SourceInfo b -> m ()
+    dropSource sc sourceInfo = do
+      let sourceConfig = _siConfiguration sourceInfo
+      let indirectDeps = mapMaybe getIndirectDep $
+                         getDependentObjs sc (SOSource name)
+
+      when (not cascade && indirectDeps /= []) $ reportDepsExt (map (SOSourceObj name) indirectDeps) []
+
+      metadataModifier <- execWriterT $ do
+        mapM_ (purgeDependentObject name >=> tell) indirectDeps
+        tell $ MetadataModifier $ metaSources %~ OMap.delete name
+
+      buildSchemaCacheFor (MOSource name) metadataModifier
+      postDropSourceHook sourceConfig
+      where
+        getIndirectDep :: SchemaObjId -> Maybe (SourceObjId b)
+        getIndirectDep = \case
+          SOSourceObj s o -> if s == name then Nothing else cast o -- consider only *this* backend specific dependencies
+          _ -> Nothing
@@ -43,7 +43,7 @@ import Data.Typeable (cast)

 import qualified Hasura.Incremental as Inc

-import Hasura.Backends.Postgres.SQL.Types (FunctionName (..), QualifiedTable)
+import Hasura.Backends.Postgres.SQL.Types (QualifiedTable)
 import Hasura.EncJSON
 import Hasura.GraphQL.Context
 import Hasura.GraphQL.Schema.Common (textToName)
@@ -55,14 +55,16 @@ import Hasura.RQL.Types hiding (fmFunction)
 import Hasura.Server.Utils


-data TrackTable
+data TrackTable b
   = TrackTable
   { tSource :: !SourceName
-  , tName   :: !QualifiedTable
+  , tName   :: !(TableName b)
   , tIsEnum :: !Bool
-  } deriving (Show, Eq)
+  }
+deriving instance (Backend b) => Show (TrackTable b)
+deriving instance (Backend b) => Eq (TrackTable b)

-instance FromJSON TrackTable where
+instance (Backend b) => FromJSON (TrackTable b) where
   parseJSON v = withOptions <|> withoutOptions
     where
       withOptions = flip (withObject "TrackTable") v $ \o -> TrackTable
@@ -71,7 +73,7 @@ instance FromJSON TrackTable where
         <*> o .:? "is_enum" .!= False
       withoutOptions = TrackTable defaultSource <$> parseJSON v <*> pure False

-instance ToJSON TrackTable where
+instance (Backend b) => ToJSON (TrackTable b) where
   toJSON (TrackTable source name isEnum)
     | isEnum = object [ "source" .= source, "table" .= name, "is_enum" .= isEnum ]
     | otherwise = toJSON name
@@ -109,21 +111,21 @@ instance (Backend b) => FromJSON (UntrackTable b) where
     <*> o .: "table"
     <*> o .:? "cascade" .!= False

-isTableTracked :: SchemaCache -> SourceName -> QualifiedTable -> Bool
-isTableTracked sc source tableName =
-  isJust $ unsafeTableInfo @'Postgres source tableName $ scPostgres sc
+isTableTracked :: forall b. (Backend b) => SourceInfo b -> TableName b -> Bool
+isTableTracked sourceInfo tableName =
+  isJust $ Map.lookup tableName $ _siTables sourceInfo

 -- | Track table/view, Phase 1:
 -- Validate table tracking operation. Fails if table is already being tracked,
 -- or if a function with the same name is being tracked.
-trackExistingTableOrViewP1 :: (QErrM m, CacheRWM m) => SourceName -> QualifiedTable -> m ()
-trackExistingTableOrViewP1 source qt = do
-  rawSchemaCache <- askSchemaCache
-  when (isTableTracked rawSchemaCache source qt) $
-    throw400 AlreadyTracked $ "view/table already tracked : " <>> qt
-  let qf = fmap (FunctionName . toTxt) qt
-  when (isJust $ unsafeFunctionInfo @'Postgres source qf $ scPostgres rawSchemaCache) $
-    throw400 NotSupported $ "function with name " <> qt <<> " already exists"
+trackExistingTableOrViewP1 :: forall m b. (QErrM m, CacheRWM m, Backend b) => SourceName -> TableName b -> m ()
+trackExistingTableOrViewP1 source tableName = do
+  sourceInfo <- askSourceInfo source
+  when (isTableTracked sourceInfo tableName) $
+    throw400 AlreadyTracked $ "view/table already tracked : " <>> tableName
+  let functionName = tableToFunction tableName
+  when (isJust $ Map.lookup functionName $ _siFunctions sourceInfo) $
+    throw400 NotSupported $ "function with name " <> tableName <<> " already exists"

 -- | Check whether a given name would conflict with the current schema by doing
 -- an internal introspection
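The refactor above replaces whole-cache, backend-unsafe lookups with plain map lookups against an already-resolved `SourceInfo`. A minimal sketch of the shape this gives the checks; the field and type names here are illustrative:

```haskell
import qualified Data.HashMap.Strict as HM
import Data.Maybe (isJust)

-- Per-source info: the tables and functions already resolved for one source.
data SourceInfo tbl fn = SourceInfo
  { siTables    :: HM.HashMap String tbl
  , siFunctions :: HM.HashMap String fn
  }

-- Tracking checks become simple lookups, with no backend casts involved.
isTableTracked :: SourceInfo tbl fn -> String -> Bool
isTableTracked si tableName = isJust (HM.lookup tableName (siTables si))

conflictsWithFunction :: SourceInfo tbl fn -> String -> Bool
conflictsWithFunction si name = isJust (HM.lookup name (siFunctions si))
```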
@@ -196,26 +198,27 @@ trackExistingTableOrViewP2 source tableName isEnum config = do
   pure successMsg

 runTrackTableQ
-  :: (MonadError QErr m, CacheRWM m, MetadataM m) => TrackTable -> m EncJSON
+  :: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) => TrackTable b -> m EncJSON
 runTrackTableQ (TrackTable source qt isEnum) = do
   trackExistingTableOrViewP1 source qt
   trackExistingTableOrViewP2 source qt isEnum emptyTableConfig

-data TrackTableV2
+data TrackTableV2 b
   = TrackTableV2
-  { ttv2Table         :: !TrackTable
-  , ttv2Configuration :: !(TableConfig 'Postgres)
-  } deriving (Show, Eq)
-$(deriveToJSON hasuraJSON ''TrackTableV2)
+  { ttv2Table         :: !(TrackTable b)
+  , ttv2Configuration :: !(TableConfig b)
+  } deriving (Show, Eq, Generic)
+instance (Backend b) => ToJSON (TrackTableV2 b) where
+  toJSON = genericToJSON hasuraJSON

-instance FromJSON TrackTableV2 where
+instance (Backend b) => FromJSON (TrackTableV2 b) where
   parseJSON = withObject "Object" $ \o -> do
     table <- parseJSON $ Object o
     configuration <- o .:? "configuration" .!= emptyTableConfig
     pure $ TrackTableV2 table configuration

 runTrackTableV2Q
-  :: (MonadError QErr m, CacheRWM m, MetadataM m) => TrackTableV2 -> m EncJSON
+  :: (MonadError QErr m, CacheRWM m, MetadataM m, BackendMetadata b) => TrackTableV2 b -> m EncJSON
 runTrackTableV2Q (TrackTableV2 (TrackTable source qt isEnum) config) = do
   trackExistingTableOrViewP1 source qt
   trackExistingTableOrViewP2 source qt isEnum config
|
|||||||
:: forall m b. (CacheRM m, QErrM m, Backend b) => UntrackTable b -> m ()
|
:: forall m b. (CacheRM m, QErrM m, Backend b) => UntrackTable b -> m ()
|
||||||
unTrackExistingTableOrViewP1 (UntrackTable source vn _) = do
|
unTrackExistingTableOrViewP1 (UntrackTable source vn _) = do
|
||||||
schemaCache <- askSchemaCache
|
schemaCache <- askSchemaCache
|
||||||
tableInfo <- unsafeTableInfo @b source vn (scPostgres schemaCache)
|
tableInfo <- unsafeTableInfo @b source vn (scSources schemaCache)
|
||||||
`onNothing` throw400 AlreadyUntracked ("view/table already untracked : " <>> vn)
|
`onNothing` throw400 AlreadyUntracked ("view/table already untracked : " <>> vn)
|
||||||
when (isSystemDefined $ _tciSystemDefined $ _tiCoreInfo tableInfo) $
|
when (isSystemDefined $ _tciSystemDefined $ _tiCoreInfo tableInfo) $
|
||||||
throw400 NotSupported $ vn <<> " is system defined, cannot untrack"
|
throw400 NotSupported $ vn <<> " is system defined, cannot untrack"
|
||||||
|
@@ -279,6 +279,7 @@ data OpExpG (b :: BackendType) a
   | CGTE !(Column b)
   | CLTE !(Column b)
   deriving (Functor, Foldable, Traversable, Generic)
+deriving instance (Backend b, Show a) => Show (OpExpG b a)
 deriving instance (Backend b, Eq a) => Eq (OpExpG b a)
 instance (Backend b, NFData a) => NFData (OpExpG b a)
 instance (Backend b, Cacheable a) => Cacheable (OpExpG b a)
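The one-line addition above is the standard recipe for deriving `Show` on a backend-indexed type: the constructors mention type families such as `Column b`, so the requirement on them has to be spelled out with a standalone deriving clause. A minimal sketch:

```haskell
{-# LANGUAGE DataKinds            #-}
{-# LANGUAGE FlexibleContexts     #-}
{-# LANGUAGE KindSignatures       #-}
{-# LANGUAGE StandaloneDeriving   #-}
{-# LANGUAGE TypeFamilies         #-}
{-# LANGUAGE UndecidableInstances #-}

data BackendType = Postgres | MSSQL

-- A backend-indexed column type, standing in for the Column type family.
type family Column (b :: BackendType)
type instance Column 'Postgres = String
type instance Column 'MSSQL    = Int

data OpExp (b :: BackendType)
  = CEQ (Column b)
  | CNE (Column b)

-- An ordinary `deriving (Show)` cannot state the requirement on `Column b`;
-- the standalone form can.
deriving instance Show (Column b) => Show (OpExp b)
```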
@@ -73,22 +73,22 @@ import Hasura.Tracing


 askSourceInfo
-  :: (CacheRM m, MonadError QErr m)
-  => SourceName -> m (SourceInfo 'Postgres)
+  :: (CacheRM m, MonadError QErr m, Backend b)
+  => SourceName -> m (SourceInfo b)
 askSourceInfo sourceName = do
-  sources <- scPostgres <$> askSchemaCache
+  sources <- scSources <$> askSchemaCache
   onNothing (unsafeSourceInfo =<< M.lookup sourceName sources) $
     -- FIXME: this error can also happen for a lookup with the wrong type
     throw400 NotExists $ "source with name " <> sourceName <<> " does not exist"

 askSourceConfig
-  :: (CacheRM m, MonadError QErr m)
-  => SourceName -> m (SourceConfig 'Postgres)
+  :: (CacheRM m, MonadError QErr m, Backend b)
+  => SourceName -> m (SourceConfig b)
 askSourceConfig = fmap _siConfiguration . askSourceInfo

 askSourceTables :: (Backend b) => CacheRM m => SourceName -> m (TableCache b)
 askSourceTables sourceName = do
-  sources <- scPostgres <$> askSchemaCache
+  sources <- scSources <$> askSchemaCache
   pure $ fromMaybe mempty $ unsafeSourceTables =<< M.lookup sourceName sources

@@ -97,7 +97,7 @@ askTabInfo
   => SourceName -> TableName b -> m (TableInfo b)
 askTabInfo sourceName tableName = do
   rawSchemaCache <- askSchemaCache
-  unsafeTableInfo sourceName tableName (scPostgres rawSchemaCache)
+  unsafeTableInfo sourceName tableName (scSources rawSchemaCache)
     `onNothing` throw400 NotExists errMsg
   where
     errMsg = "table " <> tableName <<> " does not exist in source: " <> sourceNameToText sourceName
@@ -171,7 +171,7 @@ askTableCache
   :: (QErrM m, CacheRM m, Backend b) => SourceName -> m (TableCache b)
 askTableCache sourceName = do
   schemaCache <- askSchemaCache
-  sourceInfo <- M.lookup sourceName (scPostgres schemaCache)
+  sourceInfo <- M.lookup sourceName (scSources schemaCache)
     `onNothing` throw400 NotExists ("source " <> sourceName <<> " does not exist")
   unsafeSourceTables sourceInfo
     `onNothing` throw400 NotExists ("source " <> sourceName <<> " is not a PG cache")
@@ -1,27 +1,23 @@
 {-# LANGUAGE AllowAmbiguousTypes #-}

 module Hasura.RQL.Types.Backend where

 import Hasura.Prelude

 import qualified Language.GraphQL.Draft.Syntax as G

 import Data.Aeson
 import Data.Kind (Type)
 import Data.Text.Extended
 import Data.Typeable

-import qualified Hasura.Backends.Postgres.Connection as PG
-import qualified Hasura.Backends.Postgres.SQL.DML as PG
-import qualified Hasura.Backends.Postgres.SQL.Types as PG
-import qualified Hasura.Backends.Postgres.SQL.Value as PG
-
+import Hasura.Incremental (Cacheable)
+import Hasura.RQL.DDL.Headers ()
-import Hasura.Incremental (Cacheable)
-import Hasura.RQL.DDL.Headers ()
-import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.Error
 import Hasura.SQL.Backend
 import Hasura.SQL.Types


 type Representable a = (Show a, Eq a, Hashable a, Cacheable a, NFData a, Typeable a)

 type SessionVarType b = CollectableType (ScalarType b)
@@ -135,11 +131,11 @@ class
   functionArgScalarType :: FunctionArgType b -> ScalarType b
   isComparableType :: ScalarType b -> Bool
   isNumType :: ScalarType b -> Bool
-  textToScalarType :: Text -> ScalarType b
   textToScalarValue :: Maybe Text -> ScalarValue b
   parseScalarValue :: ScalarType b -> Value -> Either QErr (ScalarValue b)
   scalarValueToJSON :: ScalarValue b -> Value
   functionToTable :: FunctionName b -> TableName b
+  tableToFunction :: TableName b -> FunctionName b

   -- functions on names
   tableGraphQLName :: TableName b -> Either QErr G.Name
@@ -148,45 +144,3 @@ class

   -- TODO: metadata related functions
   snakeCaseTableName :: TableName b -> Text
-
-instance Backend 'Postgres where
-  type SourceConfig 'Postgres = PG.PGSourceConfig
-  type SourceConnConfiguration 'Postgres = PG.PostgresConnConfiguration
-  type Identifier 'Postgres = PG.Identifier
-  type Alias 'Postgres = PG.Alias
-  type TableName 'Postgres = PG.QualifiedTable
-  type FunctionName 'Postgres = PG.QualifiedFunction
-  type FunctionArgType 'Postgres = PG.QualifiedPGType
-  type ConstraintName 'Postgres = PG.ConstraintName
-  type BasicOrderType 'Postgres = PG.OrderType
-  type NullsOrderType 'Postgres = PG.NullsOrder
-  type CountType 'Postgres = PG.CountType
-  type Column 'Postgres = PG.PGCol
-  type ScalarValue 'Postgres = PG.PGScalarValue
-  type ScalarType 'Postgres = PG.PGScalarType
-  type SQLExpression 'Postgres = PG.SQLExp
-  type SQLOperator 'Postgres = PG.SQLOp
-  type XAILIKE 'Postgres = ()
-  type XANILIKE 'Postgres = ()
-  type XComputedField 'Postgres = ()
-  type XRemoteField 'Postgres = ()
-  type XEventTrigger 'Postgres = ()
-  type XRelay 'Postgres = ()
-  type XNodesAgg 'Postgres = ()
-  type XDistinct 'Postgres = ()
-
-  backendTag = PostgresTag
-  functionArgScalarType = PG._qptName
-  isComparableType = PG.isComparableType
-  isNumType = PG.isNumType
-  textToScalarType = PG.textToPGScalarType
-  textToScalarValue = maybe (PG.PGNull PG.PGText) PG.PGValText
-  parseScalarValue ty val = runAesonParser (PG.parsePGValue ty) val
-  scalarValueToJSON = PG.pgScalarValueToJson
-  functionToTable = fmap (PG.TableName . PG.getFunctionTxt)
-
-  tableGraphQLName = PG.qualifiedObjectToName
-  functionGraphQLName = PG.qualifiedObjectToName
-  scalarTypeGraphQLName = runExcept . mkScalarTypeName
-
-  snakeCaseTableName = PG.snakeCaseQualifiedObject
@@ -33,18 +33,20 @@ module Hasura.RQL.Types.Column

 import Hasura.Prelude

 import qualified Data.HashMap.Strict as M
 import qualified Language.GraphQL.Draft.Syntax as G

 import Control.Lens.TH
 import Data.Aeson
 import Data.Aeson.TH
 import Data.Text.Extended

-import Hasura.Backends.Postgres.SQL.Types hiding (TableName, isComparableType, isNumType)
+import Hasura.Backends.Postgres.Instances.Types ()
+import Hasura.Backends.Postgres.SQL.Types hiding (TableName, isComparableType,
+                                                  isNumType)
 import Hasura.Backends.Postgres.SQL.Value
 import Hasura.Incremental (Cacheable)
 import Hasura.RQL.Instances ()
 import Hasura.RQL.Types.Backend
 import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.Error
@@ -4,21 +4,21 @@ Description: Schema cache types related to computed field

 module Hasura.RQL.Types.ComputedField where


 import Hasura.Prelude

 import qualified Data.Sequence as Seq
 import qualified Database.PG.Query as Q

 import Control.Lens hiding ((.=))
 import Data.Aeson
 import Data.Aeson.Casing
 import Data.Aeson.TH
 import Data.Text.Extended
 import Data.Text.NonEmpty

-import Hasura.Backends.Postgres.SQL.Types hiding (FunctionName, TableName)
-import Hasura.Incremental (Cacheable)
+import Hasura.Backends.Postgres.Instances.Types ()
+import Hasura.Backends.Postgres.SQL.Types hiding (FunctionName, TableName)
+import Hasura.Incremental (Cacheable)
 import Hasura.RQL.Types.Backend
 import Hasura.RQL.Types.Common
 import Hasura.RQL.Types.Function
@@ -68,6 +68,7 @@ data Code
   | NotExists
   | AlreadyExists
   | PostgresError
+  | MSSQLError
   | DatabaseConnectionTimeout
   | NotSupported
   | DependencyError
@@ -124,6 +125,7 @@ instance Show Code where
     AlreadyTracked            -> "already-tracked"
     AlreadyUntracked          -> "already-untracked"
     PostgresError             -> "postgres-error"
+    MSSQLError                -> "mssql-error"
     DatabaseConnectionTimeout -> "connection-timeout-error"
     NotSupported              -> "not-supported"
     DependencyError           -> "dependency-error"
@@ -274,7 +274,8 @@ instance FromJSON BackendSourceMetadata where
     -- TODO: Make backendKind a concrete type or re-use `BackendType`
     case backendKind of
       "postgres" -> BackendSourceMetadata @'Postgres <$> parseJSON (Object o)
-      _ -> fail "accepting only postgres backends now"
+      "mssql" -> BackendSourceMetadata @'MSSQL <$> parseJSON (Object o)
+      _ -> fail "expected postgres or mssql"

 toSourceMetadata :: (BackendMetadata b) => Prism' BackendSourceMetadata (SourceMetadata b)
 toSourceMetadata = prism' BackendSourceMetadata getSourceMetadata
|
|||||||
else Just ("metrics_config", AO.toOrdered metricsConfig)
|
else Just ("metrics_config", AO.toOrdered metricsConfig)
|
||||||
|
|
||||||
sourceMetaToOrdJSON :: BackendSourceMetadata -> AO.Value
|
sourceMetaToOrdJSON :: BackendSourceMetadata -> AO.Value
|
||||||
sourceMetaToOrdJSON (BackendSourceMetadata SourceMetadata{..}) =
|
sourceMetaToOrdJSON (BackendSourceMetadata (SourceMetadata{..} :: SourceMetadata b)) =
|
||||||
let sourceNamePair = ("name", AO.toOrdered _smName)
|
let sourceNamePair = ("name", AO.toOrdered _smName)
|
||||||
|
sourceKind = case backendTag @b of
|
||||||
|
PostgresTag -> "postgres"
|
||||||
|
MSSQLTag -> "mssql"
|
||||||
|
sourceKindPair = ("kind", AO.String sourceKind)
|
||||||
tablesPair = ("tables", AO.array $ map tableMetaToOrdJSON $ sortOn _tmTable $ OM.elems _smTables)
|
tablesPair = ("tables", AO.array $ map tableMetaToOrdJSON $ sortOn _tmTable $ OM.elems _smTables)
|
||||||
functionsPair = listToMaybeOrdPairSort "functions" functionMetadataToOrdJSON _fmFunction _smFunctions
|
functionsPair = listToMaybeOrdPairSort "functions" functionMetadataToOrdJSON _fmFunction _smFunctions
|
||||||
|
|
||||||
configurationPair = [("configuration", AO.toOrdered _smConfiguration)]
|
configurationPair = [("configuration", AO.toOrdered _smConfiguration)]
|
||||||
|
|
||||||
in AO.object $ [sourceNamePair, tablesPair] <> maybeToList functionsPair <> configurationPair
|
in AO.object $ [sourceNamePair, sourceKindPair, tablesPair] <> maybeToList functionsPair <> configurationPair
|
||||||
|
|
||||||
tableMetaToOrdJSON :: (Backend b) => TableMetadata b -> AO.Value
|
tableMetaToOrdJSON :: (Backend b) => TableMetadata b -> AO.Value
|
||||||
tableMetaToOrdJSON ( TableMetadata
|
tableMetaToOrdJSON ( TableMetadata
|
||||||
|
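With the kind pair added above, exported metadata now records which backend each source uses. A rough sketch of the resulting JSON shape for one source entry, built with plain aeson (the engine itself uses an order-preserving builder, seen as AO above):

  {-# LANGUAGE OverloadedStrings #-}
  -- Rough shape of one exported source entry once "kind" is emitted;
  -- the source name is a hypothetical example.
  import Data.Aeson
  import qualified Data.ByteString.Lazy.Char8 as BL

  sourceEntry :: Value
  sourceEntry = object
    [ "name"          .= ("my_mssql_db" :: String)
    , "kind"          .= ("mssql" :: String)   -- the new pair
    , "tables"        .= ([] :: [Value])
    , "configuration" .= object []
    ]

  main :: IO ()
  main = BL.putStrLn (encode sourceEntry)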
@@ -25,6 +25,8 @@ import Hasura.Server.Types

 import qualified Hasura.Backends.Postgres.DDL as PG

+import qualified Hasura.Backends.MSSQL.DDL as MSSQL
+
 class (Backend b) => BackendMetadata (b :: BackendType) where

   buildComputedFieldInfo

@@ -116,6 +118,11 @@ class (Backend b) => BackendMetadata (b :: BackendType) where
     -> Value
     -> m (PartialSQLExp b)

+  postDropSourceHook
+    :: (MonadError QErr m, MonadIO m, MonadBaseControl IO m)
+    => SourceConfig b
+    -> m ()
+
 instance BackendMetadata 'Postgres where
   buildComputedFieldInfo = PG.buildComputedFieldInfo
   buildRemoteFieldInfo = PG.buildRemoteFieldInfo

@@ -128,3 +135,18 @@ instance BackendMetadata 'Postgres where
   buildFunctionInfo = PG.buildFunctionInfo
   updateColumnInEventTrigger = PG.updateColumnInEventTrigger
   parseCollectableType = PG.parseCollectableType
+  postDropSourceHook = PG.postDropSourceHook
+
+instance BackendMetadata 'MSSQL where
+  buildComputedFieldInfo = MSSQL.buildComputedFieldInfo
+  buildRemoteFieldInfo = MSSQL.buildRemoteFieldInfo
+  fetchAndValidateEnumValues = MSSQL.fetchAndValidateEnumValues
+  resolveSourceConfig = MSSQL.resolveSourceConfig
+  resolveDatabaseMetadata = MSSQL.resolveDatabaseMetadata
+  createTableEventTrigger = MSSQL.createTableEventTrigger
+  buildEventTriggerInfo = MSSQL.buildEventTriggerInfo
+  parseBoolExpOperations = MSSQL.parseBoolExpOperations
+  buildFunctionInfo = MSSQL.buildFunctionInfo
+  updateColumnInEventTrigger = MSSQL.updateColumnInEventTrigger
+  parseCollectableType = MSSQL.parseCollectableType
+  postDropSourceHook = MSSQL.postDropSourceHook
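The shape of the change above is one typeclass with an instance per backend, with methods resolved at the type level. A minimal self-contained sketch of that pattern, with illustrative names rather than the engine's:

  {-# LANGUAGE AllowAmbiguousTypes #-}
  {-# LANGUAGE DataKinds           #-}
  {-# LANGUAGE KindSignatures      #-}
  {-# LANGUAGE TypeApplications    #-}
  -- One class, one instance per promoted backend constructor.
  data BackendType = Postgres | MSSQL

  class BackendMetadata (b :: BackendType) where
    backendName :: String

  instance BackendMetadata 'Postgres where
    backendName = "postgres"

  instance BackendMetadata 'MSSQL where
    backendName = "mssql"

  main :: IO ()
  main = putStrLn (backendName @'Postgres ++ " / " ++ backendName @'MSSQL)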
@@ -1,17 +1,20 @@
 module Hasura.RQL.Types.Relationship where

-import Hasura.Incremental (Cacheable)
 import Hasura.Prelude

+import qualified Data.HashMap.Strict as HM
+import qualified Data.Text as T
+
 import Control.Lens (makeLenses)
 import Data.Aeson.TH
 import Data.Aeson.Types

+import Hasura.Backends.Postgres.Instances.Types ()
+import Hasura.Incremental (Cacheable)
 import Hasura.RQL.Types.Backend
 import Hasura.RQL.Types.Common
 import Hasura.SQL.Backend

-import qualified Data.HashMap.Strict as HM
-import qualified Data.Text as T

 data RelDef a
   = RelDef
@@ -129,16 +132,19 @@ type ObjRelUsing b = RelUsing b (Column b)
 type ObjRelDef b = RelDef (ObjRelUsing b)
 type CreateObjRel b = WithTable b (ObjRelDef b)

-data DropRel
+data DropRel b
   = DropRel
   { drSource :: !SourceName
-  , drTable :: !(TableName 'Postgres)
+  , drTable :: !(TableName b)
   , drRelationship :: !RelName
   , drCascade :: !Bool
-  } deriving (Show, Eq)
-$(deriveToJSON hasuraJSON{omitNothingFields=True} ''DropRel)
+  } deriving (Generic)
+deriving instance (Backend b) => Show (DropRel b)
+deriving instance (Backend b) => Eq (DropRel b)
+instance (Backend b) => ToJSON (DropRel b) where
+  toJSON = genericToJSON hasuraJSON{omitNothingFields = True}

-instance FromJSON DropRel where
+instance (Backend b) => FromJSON (DropRel b) where
   parseJSON = withObject "Object" $ \o ->
     DropRel
       <$> o .:? "source" .!= defaultSource
@@ -146,15 +152,19 @@ instance FromJSON DropRel where
       <*> o .: "relationship"
       <*> o .:? "cascade" .!= False

-data SetRelComment
+data SetRelComment b
   = SetRelComment
   { arSource :: !SourceName
-  , arTable :: !(TableName 'Postgres)
+  , arTable :: !(TableName b)
   , arRelationship :: !RelName
   , arComment :: !(Maybe T.Text)
-  } deriving (Show, Eq)
-$(deriveToJSON hasuraJSON{omitNothingFields=True} ''SetRelComment)
-instance FromJSON SetRelComment where
+  } deriving (Generic)
+deriving instance (Backend b) => Show (SetRelComment b)
+deriving instance (Backend b) => Eq (SetRelComment b)
+instance (Backend b) => ToJSON (SetRelComment b) where
+  toJSON = genericToJSON hasuraJSON{omitNothingFields = True}
+
+instance (Backend b) => FromJSON (SetRelComment b) where
   parseJSON = withObject "Object" $ \o ->
     SetRelComment
       <$> o .:? "source" .!= defaultSource
@@ -162,16 +172,19 @@ instance FromJSON SetRelComment where
       <*> o .: "relationship"
       <*> o .:? "comment"

-data RenameRel
+data RenameRel b
   = RenameRel
   { rrSource :: !SourceName
-  , rrTable :: !(TableName 'Postgres)
+  , rrTable :: !(TableName b)
   , rrName :: !RelName
   , rrNewName :: !RelName
-  } deriving (Show, Eq)
-$(deriveToJSON hasuraJSON ''RenameRel)
+  } deriving (Generic)
+deriving instance (Backend b) => Show (RenameRel b)
+deriving instance (Backend b) => Eq (RenameRel b)
+instance (Backend b) => ToJSON (RenameRel b) where
+  toJSON = genericToJSON hasuraJSON

-instance FromJSON RenameRel where
+instance (Backend b) => FromJSON (RenameRel b) where
   parseJSON = withObject "Object" $ \o ->
     RenameRel
       <$> o .:? "source" .!= defaultSource
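The three records above all generalise the same way: the table field is now typed by a type family over the backend parameter, so Show/Eq move to standalone deriving under a Backend-style context. A hypothetical mini version showing why the standalone form is needed:

  {-# LANGUAGE DataKinds            #-}
  {-# LANGUAGE FlexibleContexts     #-}
  {-# LANGUAGE StandaloneDeriving   #-}
  {-# LANGUAGE TypeFamilies         #-}
  {-# LANGUAGE UndecidableInstances #-}
  -- Once drTable's type is a family applied to b, GHC can only derive
  -- Show/Eq standalone, with a context on the instantiated family.
  data BackendType = Postgres | MSSQL

  type family TableName (b :: BackendType)
  type instance TableName 'Postgres = String            -- stand-in for a PG qualified name
  type instance TableName 'MSSQL    = (String, String)  -- stand-in for (schema, table)

  data DropRel b = DropRel
    { drTable        :: TableName b
    , drRelationship :: String
    }

  deriving instance Show (TableName b) => Show (DropRel b)
  deriving instance Eq   (TableName b) => Eq   (DropRel b)

  main :: IO ()
  main = print (DropRel ("dbo", "album") "artist" :: DropRel 'MSSQL)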
@@ -259,26 +259,26 @@ type ActionCache = M.HashMap ActionName ActionInfo -- info of all actions
 unsafeFunctionCache
   :: forall b. Backend b => SourceName -> SourceCache -> Maybe (FunctionCache b)
 unsafeFunctionCache sourceName cache =
-  unsafeSourceFunctions =<< M.lookup sourceName cache
+  unsafeSourceFunctions @b =<< M.lookup sourceName cache

 unsafeFunctionInfo
   :: forall b. Backend b => SourceName -> FunctionName b -> SourceCache -> Maybe (FunctionInfo b)
 unsafeFunctionInfo sourceName functionName cache =
-  M.lookup functionName =<< unsafeFunctionCache sourceName cache
+  M.lookup functionName =<< unsafeFunctionCache @b sourceName cache

 unsafeTableCache
   :: forall b. Backend b => SourceName -> SourceCache -> Maybe (TableCache b)
 unsafeTableCache sourceName cache = do
-  unsafeSourceTables =<< M.lookup sourceName cache
+  unsafeSourceTables @b =<< M.lookup sourceName cache

 unsafeTableInfo
   :: forall b. Backend b => SourceName -> TableName b -> SourceCache -> Maybe (TableInfo b)
 unsafeTableInfo sourceName tableName cache =
-  M.lookup tableName =<< unsafeTableCache sourceName cache
+  M.lookup tableName =<< unsafeTableCache @b sourceName cache

 data SchemaCache
   = SchemaCache
-  { scPostgres :: !SourceCache
+  { scSources :: !SourceCache
   , scActions :: !ActionCache
   , scRemoteSchemas :: !RemoteSchemaMap
   , scAllowlist :: !(HS.HashSet GQLQuery)
@@ -55,10 +55,10 @@ unsafeSourceName :: BackendSourceInfo -> SourceName
 unsafeSourceName (BackendSourceInfo (SourceInfo name _ _ _)) = name

 unsafeSourceTables :: forall b. Backend b => BackendSourceInfo -> Maybe (TableCache b)
-unsafeSourceTables = fmap _siTables . unsafeSourceInfo
+unsafeSourceTables = fmap _siTables . unsafeSourceInfo @b

 unsafeSourceFunctions :: forall b. Backend b => BackendSourceInfo -> Maybe (FunctionCache b)
-unsafeSourceFunctions = fmap _siFunctions . unsafeSourceInfo
+unsafeSourceFunctions = fmap _siFunctions . unsafeSourceInfo @b

 unsafeSourceConfiguration :: forall b. Backend b => BackendSourceInfo -> Maybe (SourceConfig b)
 unsafeSourceConfiguration = fmap _siConfiguration . unsafeSourceInfo @b
@ -99,23 +99,30 @@ instance (MonadResolveSource m) => MonadResolveSource (LazyTxT QErr m) where
|
|||||||
getSourceResolver = lift getSourceResolver
|
getSourceResolver = lift getSourceResolver
|
||||||
|
|
||||||
-- Metadata API related types
|
-- Metadata API related types
|
||||||
data AddPgSource
|
data AddSource b
|
||||||
= AddPgSource
|
= AddSource
|
||||||
{ _apsName :: !SourceName
|
{ _asName :: !SourceName
|
||||||
, _apsConfiguration :: !PostgresConnConfiguration
|
, _asConfiguration :: !(SourceConnConfiguration b)
|
||||||
} deriving (Show, Eq)
|
} deriving (Generic)
|
||||||
$(deriveJSON hasuraJSON ''AddPgSource)
|
deriving instance (Backend b) => Show (AddSource b)
|
||||||
|
deriving instance (Backend b) => Eq (AddSource b)
|
||||||
|
|
||||||
data DropPgSource
|
instance (Backend b) => ToJSON (AddSource b) where
|
||||||
= DropPgSource
|
toJSON = genericToJSON hasuraJSON
|
||||||
{ _dpsName :: !SourceName
|
|
||||||
, _dpsCascade :: !Bool
|
|
||||||
} deriving (Show, Eq)
|
|
||||||
$(deriveToJSON hasuraJSON ''DropPgSource)
|
|
||||||
|
|
||||||
instance FromJSON DropPgSource where
|
instance (Backend b) => FromJSON (AddSource b) where
|
||||||
|
parseJSON = genericParseJSON hasuraJSON
|
||||||
|
|
||||||
|
data DropSource
|
||||||
|
= DropSource
|
||||||
|
{ _dsName :: !SourceName
|
||||||
|
, _dsCascade :: !Bool
|
||||||
|
} deriving (Show, Eq)
|
||||||
|
$(deriveToJSON hasuraJSON ''DropSource)
|
||||||
|
|
||||||
|
instance FromJSON DropSource where
|
||||||
parseJSON = withObject "Object" $ \o ->
|
parseJSON = withObject "Object" $ \o ->
|
||||||
DropPgSource <$> o .: "name" <*> o .:? "cascade" .!= False
|
DropSource <$> o .: "name" <*> o .:? "cascade" .!= False
|
||||||
|
|
||||||
newtype PostgresSourceName =
|
newtype PostgresSourceName =
|
||||||
PostgresSourceName {_psnName :: SourceName}
|
PostgresSourceName {_psnName :: SourceName}
|
||||||
|
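The generalised AddSource above decodes a name plus a backend-specific connection configuration. Roughly what an mssql add-source payload could look like; the key names inside configuration are assumptions, not taken from this diff:

  {-# LANGUAGE OverloadedStrings #-}
  import Data.Aeson
  import qualified Data.ByteString.Lazy.Char8 as BL

  -- Rough shape of an mssql add-source request body (configuration field
  -- names are illustrative assumptions).
  addSourceArgs :: Value
  addSourceArgs = object
    [ "name" .= ("my_mssql" :: String)
    , "configuration" .= object
        [ "connection_string" .=
            ("DRIVER={ODBC Driver 17 for SQL Server};SERVER=localhost;Uid=sa;Pwd=<password>" :: String)
        ]
    ]

  main :: IO ()
  main = BL.putStrLn (encode addSourceArgs)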
@@ -8,7 +8,7 @@ import Unsafe.Coerce


 -- | An enum that represents each backend we support.
-data BackendType = Postgres
+data BackendType = Postgres | MSSQL
   deriving (Show, Eq, Ord, Bounded, Enum)

@@ -16,15 +16,18 @@ data BackendType = Postgres
 -- It must contain one tag per backend in @BackendType@.
 data BackendTag (b :: BackendType) where
   PostgresTag :: BackendTag 'Postgres
+  MSSQLTag :: BackendTag 'MSSQL


 -- | How to convert back from a tag to a runtime value.
 reify :: BackendTag b -> BackendType
-reify PostgresTag = Postgres
+reify = \case
+  PostgresTag -> Postgres
+  MSSQLTag -> MSSQL


 -- We need those instances to be able to use a @BackendTag@ as a key in a
 -- dependent map. Using @BackendType@ as a data kind, makes it difficult to use
 -- @Typeable@, hence the reliance on `unsafeCoerce`.
 instance GEq BackendTag where
   geq b1 b2
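The tag GADT above ties each runtime tag to its type-level backend, which is what makes reify total by construction: adding a backend without extending reify is a compile error. A self-contained version of the round trip:

  {-# LANGUAGE DataKinds       #-}
  {-# LANGUAGE GADTs           #-}
  {-# LANGUAGE KindSignatures  #-}
  {-# LANGUAGE LambdaCase      #-}
  -- Tag-to-value round trip, mirroring BackendTag/reify.
  data BackendType = Postgres | MSSQL deriving (Show)

  data BackendTag (b :: BackendType) where
    PostgresTag :: BackendTag 'Postgres
    MSSQLTag    :: BackendTag 'MSSQL

  reify :: BackendTag b -> BackendType
  reify = \case
    PostgresTag -> Postgres
    MSSQLTag    -> MSSQL

  main :: IO ()
  main = mapM_ print [reify PostgresTag, reify MSSQLTag]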
@@ -41,10 +41,10 @@ import Hasura.Session


 data RQLMetadataV1
-  = RMPgAddSource !AddPgSource
-  | RMPgDropSource !DropPgSource
+  = RMPgAddSource !(AddSource 'Postgres)
+  | RMPgDropSource !DropSource

-  | RMPgTrackTable !TrackTableV2
+  | RMPgTrackTable !(TrackTableV2 'Postgres)
   | RMPgUntrackTable !(UntrackTable 'Postgres)
   | RMPgSetTableIsEnum !SetTableIsEnum
   | RMPgSetTableCustomization !SetTableCustomization

@@ -60,9 +60,9 @@ data RQLMetadataV1
   -- Postgres table relationships
   | RMPgCreateObjectRelationship !(CreateObjRel 'Postgres)
   | RMPgCreateArrayRelationship !(CreateArrRel 'Postgres)
-  | RMPgDropRelationship !DropRel
-  | RMPgSetRelationshipComment !SetRelComment
-  | RMPgRenameRelationship !RenameRel
+  | RMPgDropRelationship !(DropRel 'Postgres)
+  | RMPgSetRelationshipComment !(SetRelComment 'Postgres)
+  | RMPgRenameRelationship !(RenameRel 'Postgres)

   -- Postgres computed fields
   | RMPgAddComputedField !(AddComputedField 'Postgres)

@@ -83,7 +83,7 @@ data RQLMetadataV1
   | RMPgDropSelectPermission !(DropPerm 'Postgres (SelPerm 'Postgres))
   | RMPgDropUpdatePermission !(DropPerm 'Postgres (UpdPerm 'Postgres))
   | RMPgDropDeletePermission !(DropPerm 'Postgres (DelPerm 'Postgres))
-  | RMPgSetPermissionComment !SetPermComment
+  | RMPgSetPermissionComment !(SetPermComment 'Postgres)

   -- Postgres tables event triggers
   | RMPgCreateEventTrigger !CreateEventTriggerQuery

@@ -91,6 +91,29 @@ data RQLMetadataV1
   | RMPgRedeliverEvent !RedeliverEventQuery
   | RMPgInvokeEventTrigger !InvokeEventTriggerQuery

+  -- MSSQL sources
+  | RMMssqlAddSource !(AddSource 'MSSQL)
+  | RMMssqlDropSource !DropSource
+  | RMMssqlTrackTable !(TrackTableV2 'MSSQL)
+  | RMMssqlUntrackTable !(UntrackTable 'MSSQL)
+
+  | RMMssqlCreateObjectRelationship !(CreateObjRel 'MSSQL)
+  | RMMssqlCreateArrayRelationship !(CreateArrRel 'MSSQL)
+  | RMMssqlDropRelationship !(DropRel 'MSSQL)
+  | RMMssqlSetRelationshipComment !(SetRelComment 'MSSQL)
+  | RMMssqlRenameRelationship !(RenameRel 'MSSQL)
+
+  | RMMssqlCreateInsertPermission !(CreateInsPerm 'MSSQL)
+  | RMMssqlCreateSelectPermission !(CreateSelPerm 'MSSQL)
+  | RMMssqlCreateUpdatePermission !(CreateUpdPerm 'MSSQL)
+  | RMMssqlCreateDeletePermission !(CreateDelPerm 'MSSQL)
+
+  | RMMssqlDropInsertPermission !(DropPerm 'MSSQL (InsPerm 'MSSQL))
+  | RMMssqlDropSelectPermission !(DropPerm 'MSSQL (SelPerm 'MSSQL))
+  | RMMssqlDropUpdatePermission !(DropPerm 'MSSQL (UpdPerm 'MSSQL))
+  | RMMssqlDropDeletePermission !(DropPerm 'MSSQL (DelPerm 'MSSQL))
+  | RMMssqlSetPermissionComment !(SetPermComment 'MSSQL)
+
   -- Inconsistent metadata
   | RMGetInconsistentMetadata !GetInconsistentMetadata
   | RMDropInconsistentMetadata !DropInconsistentMetadata
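Each new RMMssql* constructor corresponds to a metadata API command. Assuming the usual snake_case mapping of constructor names to type strings, a request hitting the track-table branch could look like this (the table payload shape is an assumption):

  {-# LANGUAGE OverloadedStrings #-}
  import Data.Aeson
  import qualified Data.ByteString.Lazy.Char8 as BL

  -- Hypothetical v1/metadata request body for tracking an MSSQL table.
  trackTableReq :: Value
  trackTableReq = object
    [ "type" .= ("mssql_track_table" :: String)
    , "args" .= object
        [ "source" .= ("my_mssql" :: String)
        , "table"  .= object [ "schema" .= ("dbo" :: String), "name" .= ("album" :: String) ]
        ]
    ]

  main :: IO ()
  main = BL.putStrLn (encode trackTableReq)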
@@ -312,102 +335,124 @@ runMetadataQueryV1M
   -> RQLMetadataV1
   -> m EncJSON
 runMetadataQueryV1M env currentResourceVersion = \case
-  RMPgAddSource q -> runAddPgSource q
-  RMPgDropSource q -> runDropPgSource q
+  RMPgAddSource q -> runAddSource q
+  RMPgDropSource q -> runDropSource q

   RMPgTrackTable q -> runTrackTableV2Q q
   RMPgUntrackTable q -> runUntrackTableQ q
   RMPgSetTableIsEnum q -> runSetExistingTableIsEnumQ q
   RMPgSetTableCustomization q -> runSetTableCustomization q

   RMPgTrackFunction q -> runTrackFunctionV2 q
   RMPgUntrackFunction q -> runUntrackFunc q

   RMPgCreateFunctionPermission q -> runCreateFunctionPermission q
   RMPgDropFunctionPermission q -> runDropFunctionPermission q

   RMPgCreateObjectRelationship q -> runCreateRelationship ObjRel q
   RMPgCreateArrayRelationship q -> runCreateRelationship ArrRel q
   RMPgDropRelationship q -> runDropRel q
   RMPgSetRelationshipComment q -> runSetRelComment q
   RMPgRenameRelationship q -> runRenameRel q

   RMPgAddComputedField q -> runAddComputedField q
   RMPgDropComputedField q -> runDropComputedField q

   RMPgCreateRemoteRelationship q -> runCreateRemoteRelationship q
   RMPgUpdateRemoteRelationship q -> runUpdateRemoteRelationship q
   RMPgDeleteRemoteRelationship q -> runDeleteRemoteRelationship q

   RMPgCreateInsertPermission q -> runCreatePerm q
   RMPgCreateSelectPermission q -> runCreatePerm q
   RMPgCreateUpdatePermission q -> runCreatePerm q
   RMPgCreateDeletePermission q -> runCreatePerm q

   RMPgDropInsertPermission q -> runDropPerm q
   RMPgDropSelectPermission q -> runDropPerm q
   RMPgDropUpdatePermission q -> runDropPerm q
   RMPgDropDeletePermission q -> runDropPerm q
   RMPgSetPermissionComment q -> runSetPermComment q

   RMPgCreateEventTrigger q -> runCreateEventTriggerQuery q
   RMPgDeleteEventTrigger q -> runDeleteEventTriggerQuery q
   RMPgRedeliverEvent q -> runRedeliverEvent q
   RMPgInvokeEventTrigger q -> runInvokeEventTrigger q

+  RMMssqlAddSource q -> runAddSource q
+  RMMssqlDropSource q -> runDropSource q
+  RMMssqlTrackTable q -> runTrackTableV2Q q
+  RMMssqlUntrackTable q -> runUntrackTableQ q
+
+  RMMssqlCreateObjectRelationship q -> runCreateRelationship ObjRel q
+  RMMssqlCreateArrayRelationship q -> runCreateRelationship ArrRel q
+  RMMssqlDropRelationship q -> runDropRel q
+  RMMssqlSetRelationshipComment q -> runSetRelComment q
+  RMMssqlRenameRelationship q -> runRenameRel q
+
+  RMMssqlCreateInsertPermission q -> runCreatePerm q
+  RMMssqlCreateSelectPermission q -> runCreatePerm q
+  RMMssqlCreateUpdatePermission q -> runCreatePerm q
+  RMMssqlCreateDeletePermission q -> runCreatePerm q
+
+  RMMssqlDropInsertPermission q -> runDropPerm q
+  RMMssqlDropSelectPermission q -> runDropPerm q
+  RMMssqlDropUpdatePermission q -> runDropPerm q
+  RMMssqlDropDeletePermission q -> runDropPerm q
+  RMMssqlSetPermissionComment q -> runSetPermComment q
+
   RMGetInconsistentMetadata q -> runGetInconsistentMetadata q
   RMDropInconsistentMetadata q -> runDropInconsistentMetadata q

   RMAddRemoteSchema q -> runAddRemoteSchema env q
   RMRemoveRemoteSchema q -> runRemoveRemoteSchema q
   RMReloadRemoteSchema q -> runReloadRemoteSchema q
   RMIntrospectRemoteSchema q -> runIntrospectRemoteSchema q

   RMAddRemoteSchemaPermissions q -> runAddRemoteSchemaPermissions q
   RMDropRemoteSchemaPermissions q -> runDropRemoteSchemaPermissions q

   RMCreateCronTrigger q -> runCreateCronTrigger q
   RMDeleteCronTrigger q -> runDeleteCronTrigger q
   RMCreateScheduledEvent q -> runCreateScheduledEvent q
   RMDeleteScheduledEvent q -> runDeleteScheduledEvent q
   RMGetScheduledEvents q -> runGetScheduledEvents q
   RMGetEventInvocations q -> runGetEventInvocations q

   RMCreateQueryCollection q -> runCreateCollection q
   RMDropQueryCollection q -> runDropCollection q
   RMAddQueryToCollection q -> runAddQueryToCollection q
   RMDropQueryFromCollection q -> runDropQueryFromCollection q
   RMAddCollectionToAllowlist q -> runAddCollectionToAllowlist q
   RMDropCollectionFromAllowlist q -> runDropCollectionFromAllowlist q

   RMReplaceMetadata q -> runReplaceMetadata q
   RMExportMetadata q -> runExportMetadata q
   RMClearMetadata q -> runClearMetadata q
   RMReloadMetadata q -> runReloadMetadata q

   RMCreateAction q -> runCreateAction q
   RMDropAction q -> runDropAction q
   RMUpdateAction q -> runUpdateAction q
   RMCreateActionPermission q -> runCreateActionPermission q
   RMDropActionPermission q -> runDropActionPermission q

   RMCreateRestEndpoint q -> runCreateEndpoint q
   RMDropRestEndpoint q -> runDropEndpoint q

   RMSetCustomTypes q -> runSetCustomTypes q

   RMDumpInternalState q -> runDumpInternalState q

   RMGetCatalogState q -> runGetCatalogState q
   RMSetCatalogState q -> runSetCatalogState q

   RMSetApiLimits q -> runSetApiLimits q
   RMRemoveApiLimits -> runRemoveApiLimits

   RMSetMetricsConfig q -> runSetMetricsConfig q
   RMRemoveMetricsConfig -> runRemoveMetricsConfig

   RMBulk q -> encJFromList <$> indexedMapM (runMetadataQueryM env currentResourceVersion) q

 runMetadataQueryV2M
   :: ( MonadIO m
@@ -48,8 +48,8 @@ import Hasura.Server.Version (HasVersion)
 import Hasura.Session

 data RQLQueryV1
-  = RQAddExistingTableOrView !TrackTable
-  | RQTrackTable !TrackTable
+  = RQAddExistingTableOrView !(TrackTable 'Postgres)
+  | RQTrackTable !(TrackTable 'Postgres)
   | RQUntrackTable !(UntrackTable 'Postgres)
   | RQSetTableIsEnum !SetTableIsEnum
   | RQSetTableCustomization !SetTableCustomization

@@ -59,9 +59,9 @@ data RQLQueryV1

   | RQCreateObjectRelationship !(CreateObjRel 'Postgres)
   | RQCreateArrayRelationship !(CreateArrRel 'Postgres)
-  | RQDropRelationship !DropRel
-  | RQSetRelationshipComment !SetRelComment
-  | RQRenameRelationship !RenameRel
+  | RQDropRelationship !(DropRel 'Postgres)
+  | RQSetRelationshipComment !(SetRelComment 'Postgres)
+  | RQRenameRelationship !(RenameRel 'Postgres)

   -- computed fields related
   | RQAddComputedField !(AddComputedField 'Postgres)

@@ -80,7 +80,7 @@ data RQLQueryV1
   | RQDropSelectPermission !(DropPerm 'Postgres (SelPerm 'Postgres))
   | RQDropUpdatePermission !(DropPerm 'Postgres (UpdPerm 'Postgres))
   | RQDropDeletePermission !(DropPerm 'Postgres (DelPerm 'Postgres))
-  | RQSetPermissionComment !SetPermComment
+  | RQSetPermissionComment !(SetPermComment 'Postgres)

   | RQGetInconsistentMetadata !GetInconsistentMetadata
   | RQDropInconsistentMetadata !DropInconsistentMetadata

@@ -139,7 +139,7 @@ data RQLQueryV1
   deriving (Eq)

 data RQLQueryV2
-  = RQV2TrackTable !TrackTableV2
+  = RQV2TrackTable !(TrackTableV2 'Postgres)
   | RQV2SetTableCustomFields !SetTableCustomFields -- deprecated
   | RQV2TrackFunction !TrackFunctionV2
   | RQV2ReplaceMetadata !ReplaceMetadataV2
@@ -2,13 +2,13 @@
 module Hasura.Server.API.V2Query where

 import Control.Lens
 import Control.Monad.Trans.Control (MonadBaseControl)
 import Data.Aeson
 import Data.Aeson.Casing
 import Data.Aeson.TH

 import qualified Data.Environment as Env
 import qualified Network.HTTP.Client as HTTP

 import Hasura.EncJSON
 import Hasura.Metadata.Class

@@ -22,11 +22,13 @@ import Hasura.RQL.DML.Types
 import Hasura.RQL.DML.Update
 import Hasura.RQL.Types
 import Hasura.RQL.Types.Run
 import Hasura.Server.Types (InstanceId (..), MaintenanceMode (..))
 import Hasura.Server.Version (HasVersion)
 import Hasura.Session

-import qualified Hasura.Tracing as Tracing
+import qualified Hasura.Backends.MSSQL.DDL.RunSQL as MSSQL
+import qualified Hasura.Backends.MSSQL.Types as MSSQL
+import qualified Hasura.Tracing as Tracing

 data RQLQuery
   = RQInsert !InsertQuery

@@ -34,7 +36,7 @@ data RQLQuery
   | RQUpdate !UpdateQuery
   | RQDelete !DeleteQuery
   | RQCount !CountQuery
+  | RMMssqlRunSql !MSSQL.MSSQLRunSQL
   | RQRunSql !RunSQL
   | RQBulk ![RQLQuery]
   deriving (Show)

@@ -112,10 +114,11 @@ runQueryM
   )
   => Env.Environment -> RQLQuery -> m EncJSON
 runQueryM env = \case
   RQInsert q -> runInsert env q
   RQSelect q -> runSelect q
   RQUpdate q -> runUpdate env q
   RQDelete q -> runDelete env q
   RQCount q -> runCount q
   RQRunSql q -> runRunSQL q
+  RMMssqlRunSql q -> MSSQL.runSQL q
   RQBulk l -> encJFromList <$> indexedMapM (runQueryM env) l
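A v2/query request exercising the new RMMssqlRunSql branch might look as follows; the mssql_run_sql type string assumes the usual snake_case constructor mapping, and the args shape is an assumption:

  {-# LANGUAGE OverloadedStrings #-}
  import Data.Aeson
  import qualified Data.ByteString.Lazy.Char8 as BL

  -- Hypothetical v2/query request body for running raw SQL on MSSQL.
  runSqlReq :: Value
  runSqlReq = object
    [ "type" .= ("mssql_run_sql" :: String)
    , "args" .= object
        [ "source" .= ("my_mssql" :: String)
        , "sql"    .= ("SELECT 1;" :: String)
        ]
    ]

  main :: IO ()
  main = BL.putStrLn (encode runSqlReq)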
@@ -604,7 +604,7 @@ v1Alpha1PGDumpHandler b = do
   onlyAdmin
   scRef <- asks (scCacheRef . hcServerCtx)
   sc <- getSCFromRef scRef
-  let sources = scPostgres sc
+  let sources = scSources sc
       sourceName = PGD.prbSource b
       sourceConfig = unsafeSourceConfiguration @'Postgres =<< M.lookup sourceName sources
   ci <- fmap _pscConnInfo sourceConfig

@@ -171,8 +171,8 @@ computeMetrics sc _mtServiceTimings _mtPgVersion =

   where
     -- TODO: multiple sources
-    pgTableCache = fromMaybe mempty $ unsafeTableCache @'Postgres defaultSource $ scPostgres sc
-    pgFunctionCache = fromMaybe mempty $ unsafeFunctionCache @'Postgres defaultSource $ scPostgres sc
+    pgTableCache = fromMaybe mempty $ unsafeTableCache @'Postgres defaultSource $ scSources sc
+    pgFunctionCache = fromMaybe mempty $ unsafeFunctionCache @'Postgres defaultSource $ scSources sc
     userTables = Map.filter (not . isSystemDefined . _tciSystemDefined . _tiCoreInfo) pgTableCache
     countUserTables predicate = length . filter predicate $ Map.elems userTables
@@ -155,6 +155,9 @@ commonResponseHeadersIgnored =
 isSessionVariable :: Text -> Bool
 isSessionVariable = T.isPrefixOf "x-hasura-" . T.toLower

+isReqUserId :: Text -> Bool
+isReqUserId = (== "req_user_id") . T.toLower
+
 mkClientHeadersForward :: [HTTP.Header] -> [HTTP.Header]
 mkClientHeadersForward reqHeaders =
   xForwardedHeaders <> (filterSessionVariables . filterRequestHeaders) reqHeaders
server/src-rsr/mssql_table_metadata.sql (new file, 42 lines)
@@ -0,0 +1,42 @@
+-- SCHEMA_NAME(..)
+SELECT object.name, object.schema_id, object.object_id, object.type_desc,
+       JSON_QUERY([schema].json) AS [joined_sys_schema],
+       JSON_QUERY([column].json) AS [joined_sys_column]
+FROM sys.objects object
+CROSS APPLY (SELECT [column].name, [column].column_id, [column].is_nullable, [column].user_type_id,
+                    JSON_QUERY([types].json) AS [joined_sys_type],
+                    JSON_QUERY(ISNULL([relationships].json,'[]')) AS [joined_foreign_key_columns]
+             FROM sys.columns [column]
+             CROSS APPLY (SELECT name, schema_id, user_type_id FROM sys.types [type]
+                          WHERE [type].user_type_id = [column].user_type_id
+                          FOR JSON PATH, WITHOUT_ARRAY_WRAPPER)
+               AS [types](json)
+             CROSS APPLY (SELECT fk.*,
+                                 referenced_table.name AS joined_referenced_table_name,
+                                 referenced_column.name AS joined_referenced_column_name,
+                                 JSON_QUERY([schema].json) AS [joined_referenced_sys_schema]
+                          FROM sys.foreign_key_columns [fk],
+                               sys.objects AS referenced_table,
+                               sys.columns AS referenced_column
+                          CROSS APPLY (SELECT [schema].name, [schema].schema_id
+                                       FROM sys.schemas [schema]
+                                       WHERE [schema].schema_id = object.schema_id
+                                       FOR JSON PATH, WITHOUT_ARRAY_WRAPPER)
+                            AS [schema](json)
+                          WHERE [object].object_id = fk.parent_object_id
+                            AND [referenced_table].object_id = fk.referenced_object_id
+                            AND [referenced_column].object_id = [referenced_table].object_id
+                            AND [referenced_column].column_id = fk.referenced_column_id
+                            AND [column].column_id = fk.parent_column_id
+                          FOR JSON PATH)
+               AS [relationships](json)
+             WHERE [column].object_id = object.object_id
+             FOR JSON PATH)
+  AS [column](json)
+CROSS APPLY (SELECT [schema].name, [schema].schema_id
+             FROM sys.schemas [schema]
+             WHERE [schema].schema_id = object.schema_id
+             FOR JSON PATH, WITHOUT_ARRAY_WRAPPER)
+  AS [schema](json)
+WHERE object.type_desc = 'USER_TABLE'
+FOR JSON PATH
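The query above returns one JSON document per user table, with nested schema, column, and foreign-key objects produced by FOR JSON PATH. A consumer could decode a fragment of it along these lines (record fields are illustrative, covering only a few of the emitted keys):

  {-# LANGUAGE DeriveGeneric     #-}
  {-# LANGUAGE OverloadedStrings #-}
  import Data.Aeson
  import GHC.Generics (Generic)

  -- Illustrative decoding of part of one row emitted by the query above.
  data SysColumn = SysColumn
    { name        :: String
    , column_id   :: Int
    , is_nullable :: Bool
    } deriving (Show, Generic)

  instance FromJSON SysColumn

  main :: IO ()
  main = print (decode "{\"name\":\"id\",\"column_id\":1,\"is_nullable\":false}" :: Maybe SysColumn)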