mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-09-20 15:09:02 +03:00
server: remove hdb_lib and add tests for read-only source
PR-URL: https://github.com/hasura/graphql-engine-mono/pull/3634 Co-authored-by: Philip Lykke Carlsen <358550+plcplc@users.noreply.github.com> GitOrigin-RevId: 2db62a279496cd7e5dd57bdf02c3efa7b70042c8
This commit is contained in:
parent
109a0beca8
commit
e87433c2bb
@ -2,6 +2,7 @@ haskell-tests
|
||||
no-auth
|
||||
admin-secret
|
||||
admin-secret-unauthorized-role
|
||||
read-only-db
|
||||
jwt-rs512
|
||||
jwt-ed25519
|
||||
jwt-stringified
|
||||
|
@ -782,6 +782,51 @@ startup-db-calls)
|
||||
# end verbose logging tests
|
||||
;;
|
||||
|
||||
read-only-db)
  ## read-only DB tests; Hasura should start and run read queries against a read-only DB

  echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH READ-ONLY DATABASE ########>\n"

  # Randomized admin secret so this test instance is not accidentally reachable.
  export HASURA_GRAPHQL_ADMIN_SECRET="HGE$RANDOM$RANDOM"

  export HASURA_GRAPHQL_ENABLED_LOG_TYPES="startup,http-log,webhook-log,websocket-log,query-log"
  export HASURA_GRAPHQL_LOG_LEVEL="debug"
  export HASURA_GRAPHQL_DEV_MODE="false"
  export HASURA_GRAPHQL_ADMIN_INTERNAL_ERRORS="false"

  # setup the database for read-only access
  # 'test_graphql_read_only_source.py' assumes 'HASURA_READONLY_DB_URL' is set
  # Note: setting default_transaction_mode to read-only etc. doesn't work for
  # DDL statements. To replicate read-only access even for DDLs, we need to
  # create a read-only user
  readonly_sql=$(cat <<EOF
CREATE USER hasuraro WITH PASSWORD 'passme';
GRANT CONNECT ON DATABASE pg_source_1 TO hasuraro;
GRANT USAGE ON SCHEMA public TO hasuraro;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO hasuraro;
GRANT SELECT ON ALL TABLES IN SCHEMA pg_catalog TO hasuraro;
GRANT SELECT ON ALL TABLES IN SCHEMA information_schema TO hasuraro;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT SELECT ON TABLES TO hasuraro;
EOF
)
  psql "$HASURA_GRAPHQL_PG_SOURCE_URL_1" -c "$readonly_sql"

  # Connection string for the read-only role created above; consumed by the
  # pytest setup fixtures via 'from_env'.
  export HASURA_READONLY_DB_URL="postgresql://hasuraro:passme@localhost:5432/pg_source_1"

  run_hge_with_args serve
  wait_for_port 8080

  # and then test graphql queries work
  pytest -n 1 --hge-urls "$HGE_URL" \
    --pg-urls "$HASURA_GRAPHQL_PG_SOURCE_URL_1" \
    --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" \
    --test-read-only-source \
    test_graphql_read_only_source.py

  unset HASURA_GRAPHQL_ENABLED_LOG_TYPES
  kill_hge_servers

  # end read-only DB tests
  ;;
|
||||
|
||||
remote-schema-https)
|
||||
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH SECURE REMOTE SCHEMA #########################>\n"
|
||||
|
||||
|
@ -139,8 +139,6 @@ resolveDatabaseMetadata ::
|
||||
SourceTypeCustomization ->
|
||||
m (Either QErr (ResolvedSource ('Postgres pgKind)))
|
||||
resolveDatabaseMetadata sourceConfig sourceCustomization = runExceptT do
|
||||
runTx (_pscExecCtx sourceConfig) Q.ReadWrite ensureMetadataSupportingDefinitions
|
||||
|
||||
(tablesMeta, functionsMeta, pgScalars) <- runTx (_pscExecCtx sourceConfig) Q.ReadOnly $ do
|
||||
tablesMeta <- fetchTableMetadata
|
||||
functionsMeta <- fetchFunctionMetadata
|
||||
@ -299,11 +297,6 @@ upMigrationsUntil43 =
|
||||
(migrationsFromFile [5 .. 40]) ++ migrationsFromFile [42 .. 43]
|
||||
)
|
||||
|
||||
-- | Ensure that the supporting definitions used in metadata fetching have been
|
||||
-- loaded.
|
||||
ensureMetadataSupportingDefinitions :: forall m. MonadTx m => m ()
|
||||
ensureMetadataSupportingDefinitions = liftTx $ Q.multiQE defaultTxErrorHandler $(makeRelativeToProject "src-rsr/pg_metadata_lib.sql" >>= Q.sqlFromFile)
|
||||
|
||||
-- | Fetch Postgres metadata of all user tables
|
||||
fetchTableMetadata ::
|
||||
forall pgKind m.
|
||||
|
@ -56,17 +56,37 @@ LEFT JOIN LATERAL
|
||||
-- The columns 'pg_attribute.attidentity' and 'pg_attribute.attgenerated' are
|
||||
-- not available in older versions of Postgres, because those versions do not
|
||||
-- implement the concepts the catalog columns represent.
|
||||
-- Therefore we define and use the polyfill functions
|
||||
-- 'hdb_lib.pg_attidentity' and 'hdb_lib.pg_attgenerated', which ensure the
|
||||
-- presence of these columns in this script.
|
||||
INNER JOIN hdb_lib.pg_attidentity() identitypolyfill
|
||||
-- To support older versions we apply an aliasing hack that ensures
|
||||
-- _something_ called e.g. attidentity is in scope.
|
||||
-- Originally sourced from: https://stackoverflow.com/questions/18951071/postgres-return-a-default-value-when-a-column-doesnt-exist.
|
||||
INNER JOIN
|
||||
(
|
||||
SELECT attrelid, attnum, attname, CASE WHEN attidentity_exists
|
||||
THEN attidentity::text
|
||||
ELSE ''::text
|
||||
END as attidentity
|
||||
FROM pg_catalog.pg_attribute
|
||||
CROSS JOIN (SELECT current_setting('server_version_num')::int >= 100000)
|
||||
AS attidentity(attidentity_exists)
|
||||
) AS identitypolyfill
|
||||
ON identitypolyfill.attrelid = "column".attrelid
|
||||
AND identitypolyfill.attnum = "column".attnum
|
||||
AND identitypolyfill.attname = "column".attname
|
||||
INNER JOIN hdb_lib.pg_attgenerated() generatedpolyfill
|
||||
|
||||
INNER JOIN
|
||||
(
|
||||
SELECT attrelid, attnum, attname, CASE WHEN attgenerated_exists
|
||||
THEN attgenerated::text
|
||||
ELSE ''::text
|
||||
END as attgenerated
|
||||
FROM pg_catalog.pg_attribute
|
||||
CROSS JOIN (SELECT current_setting('server_version_num')::int >= 120000)
|
||||
AS attgenerated(attgenerated_exists)
|
||||
) AS generatedpolyfill
|
||||
ON generatedpolyfill.attrelid = "column".attrelid
|
||||
AND generatedpolyfill.attnum = "column".attnum
|
||||
AND generatedpolyfill.attname = "column".attname
|
||||
|
||||
LEFT JOIN pg_catalog.pg_type "type"
|
||||
ON "type".oid = "column".atttypid
|
||||
LEFT JOIN pg_catalog.pg_type base_type
|
||||
|
@ -1,49 +0,0 @@
|
||||
-- Helper schema holding cross-version polyfill functions used by the
-- metadata-fetching SQL scripts.
CREATE SCHEMA IF NOT EXISTS hdb_lib;

-- Polyfill for 'pg_catalog.pg_attribute.attidentity' (only present on PG >= 10).
-- On older servers every row reports attidentity = '' (not an identity column).
CREATE OR REPLACE FUNCTION
  hdb_lib.pg_attidentity()
  RETURNS TABLE (attrelid oid, attname name, attnum smallint, attidentity char) AS $$
BEGIN
  -- server_version_num is e.g. 100000 for PG 10.0; compare numerically.
  IF current_setting('server_version_num')::int >= 100000
  THEN RETURN QUERY
    SELECT a.attrelid, a.attname, a.attnum, a.attidentity::char
    FROM pg_catalog.pg_attribute a;
  ELSE
    -- Always return attidentity = '', indicating that the column is not an
    -- identity column.
    RETURN QUERY
    SELECT a.attrelid, a.attname, a.attnum, ''::char as attidentity
    FROM pg_catalog.pg_attribute a;
  END IF;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION hdb_lib.pg_attidentity() IS
'The column "pg_catalog.pg_attribute(attidentity)" was only introduced in PG 10,
along with with the introduction of identity columns.
This function provides the "attidentity" column in a cross-version compatible way.
See https://www.postgresql.org/docs/10/catalog-pg-attribute.html for details.
';
||||
|
||||
-- Polyfill for 'pg_catalog.pg_attribute.attgenerated' (only present on PG >= 12).
-- On older servers every row reports attgenerated = '' (not a generated column).
CREATE OR REPLACE FUNCTION
  hdb_lib.pg_attgenerated()
  RETURNS TABLE (attrelid oid, attname name, attnum smallint, attgenerated char) AS $$
BEGIN
  -- server_version_num is e.g. 120000 for PG 12.0; compare numerically.
  IF current_setting('server_version_num')::int >= 120000
  THEN RETURN QUERY
    SELECT a.attrelid, a.attname, a.attnum, a.attgenerated::char
    FROM pg_catalog.pg_attribute a;
  ELSE
    -- Always return attgenerated = '', indicating that the column is not a
    -- generated column.
    RETURN QUERY
    SELECT a.attrelid, a.attname, a.attnum, ''::char as attgenerated
    FROM pg_catalog.pg_attribute a;
  END IF;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION hdb_lib.pg_attgenerated() IS
'The column "pg_catalog.pg_attribute(attgenerated)" was only introduced in PG 12,
along with the introduction of generated columns.
This function provides the "attgenerated" column in a cross-version compatible way.
See https://www.postgresql.org/docs/12/catalog-pg-attribute.html for details.
';
|
@ -45,20 +45,41 @@ LEFT JOIN LATERAL
|
||||
'is_updatable', NOT (identitypolyfill.attidentity = 'a' OR generatedpolyfill.attgenerated = 's'))
|
||||
)) AS info
|
||||
FROM pg_catalog.pg_attribute "column"
|
||||
|
||||
-- The columns 'pg_attribute.attidentity' and 'pg_attribute.attgenerated' are
|
||||
-- not available in older versions of Postgres, because those versions do not
|
||||
-- implement the concepts the catalog columns represent.
|
||||
-- Therefore we define and use the polyfill functions
|
||||
-- 'hdb_lib.pg_attidentity' and 'hdb_lib.pg_attgenerated', which ensure the
|
||||
-- presence of these columns in this script.
|
||||
INNER JOIN hdb_lib.pg_attidentity() identitypolyfill
|
||||
-- To support older versions we apply an aliasing hack that ensures
|
||||
-- _something_ called e.g. attidentity is in scope.
|
||||
-- Originally sourced from: https://stackoverflow.com/questions/18951071/postgres-return-a-default-value-when-a-column-doesnt-exist.
|
||||
INNER JOIN
|
||||
(
|
||||
SELECT attrelid, attnum, attname, CASE WHEN attidentity_exists
|
||||
THEN attidentity::text
|
||||
ELSE ''::text
|
||||
END as attidentity
|
||||
FROM pg_catalog.pg_attribute
|
||||
CROSS JOIN (SELECT current_setting('server_version_num')::int >= 100000)
|
||||
AS attidentity(attidentity_exists)
|
||||
) AS identitypolyfill
|
||||
ON identitypolyfill.attrelid = "column".attrelid
|
||||
AND identitypolyfill.attnum = "column".attnum
|
||||
AND identitypolyfill.attname = "column".attname
|
||||
INNER JOIN hdb_lib.pg_attgenerated() generatedpolyfill
|
||||
|
||||
INNER JOIN
|
||||
(
|
||||
SELECT attrelid, attnum, attname, CASE WHEN attgenerated_exists
|
||||
THEN attgenerated::text
|
||||
ELSE ''::text
|
||||
END as attgenerated
|
||||
FROM pg_catalog.pg_attribute
|
||||
CROSS JOIN (SELECT current_setting('server_version_num')::int >= 120000)
|
||||
AS attgenerated(attgenerated_exists)
|
||||
) AS generatedpolyfill
|
||||
ON generatedpolyfill.attrelid = "column".attrelid
|
||||
AND generatedpolyfill.attnum = "column".attnum
|
||||
AND generatedpolyfill.attname = "column".attname
|
||||
|
||||
LEFT JOIN pg_catalog.pg_type "type"
|
||||
ON "type".oid = "column".atttypid
|
||||
LEFT JOIN pg_catalog.pg_type base_type
|
||||
|
@ -210,6 +210,13 @@ This option may result in test failures if the schema has to change between the
|
||||
help="Run testcases for auth webhook header forwarding"
|
||||
)
|
||||
|
||||
    # Opt-in flag gating test_graphql_read_only_source.py, which module-skips
    # itself when the flag is absent.
    parser.addoption(
        "--test-read-only-source",
        action="store_true",
        default=False,
        # NOTE(review): required=False is redundant — argparse options are
        # optional by default, and store_true already implies default=False.
        required=False,
        help="Run testcases with a read-only database source"
    )
|
||||
|
||||
|
||||
#By default,
|
||||
|
@ -0,0 +1,19 @@
|
||||
# check_query_f fixture: a plain select against the read-only source must
# succeed and return the two rows seeded by the external schema setup.
description: Simple GraphQL query on a read-only source
url: /v1/graphql
status: 200
response:
  data:
    aves:
    - id: 1
      name: Booted Eagle
    - id: 2
      name: Hooded Merganser
query:
  operationName: getBirds
  query: |
    query getBirds {
      aves {
        id
        name
      }
    }
|
@ -0,0 +1,32 @@
|
||||
# As this is a read-only test, we can't create the schema/tables as part of the
# HGE metadata. This setup assumes the tables are already created.
# The schema assumed is -
# CREATE TABLE aves (id SERIAL PRIMARY KEY, name TEXT)
# And (already existing) data assumed is -
# - id: 1
#   name: Booted Eagle
# - id: 2
#   name: Hooded Merganser

type: bulk
args:

# if a default source is not added, the teardown doesn't happen for PG backend.
# That is weird! Why is it always assumed that PG backend tests will always have
# a default source?

# Add the read-only database (HASURA_READONLY_DB_URL is exported by the CI
# script after creating the restricted 'hasuraro' role).
- type: pg_add_source
  args:
    name: pg_readonly
    configuration:
      connection_info:
        database_url:
          from_env:
            HASURA_READONLY_DB_URL

# track tables
- type: pg_track_table
  args:
    source: pg_readonly
    table:
      name: aves
|
@ -0,0 +1,28 @@
|
||||
# As this is a read-only test, we can't create the schema/tables as part of the
# HGE metadata. This setup assumes the tables are already created.
# The schema assumed is -
# CREATE TABLE aves (id SERIAL PRIMARY KEY, name TEXT)
# And (already existing) data assumed is -
# - id: 1
#   name: Booted Eagle
# - id: 2
#   name: Hooded Merganser

type: bulk
args:

# Add the read-only database as a citus source (HASURA_READONLY_DB_URL is
# exported by the CI script after creating the restricted 'hasuraro' role).
- type: citus_add_source
  args:
    name: citus_readonly
    configuration:
      connection_info:
        database_url:
          from_env:
            HASURA_READONLY_DB_URL

# track tables
- type: citus_track_table
  args:
    source: citus_readonly
    table:
      name: aves
|
@ -0,0 +1,7 @@
|
||||
# Teardown: detach the read-only source added in setup. The table itself is
# dropped by the external-schema fixture, not through HGE metadata.
type: bulk
args:

- type: pg_drop_source
  args:
    name: pg_readonly
|
||||
|
@ -0,0 +1,6 @@
|
||||
# Teardown: detach the read-only citus source added in setup. The table itself
# is dropped by the external-schema fixture, not through HGE metadata.
type: bulk
args:

- type: citus_drop_source
  args:
    name: citus_readonly
|
@ -0,0 +1,23 @@
|
||||
# check_query_f fixture: a mutation on the read-only source must be rejected by
# the database; HGE surfaces that as a 200 response carrying a GraphQL error.
description: GraphQL mutation which fails on a read-only source
url: /v1/graphql
status: 200
response:
  errors:
  - extensions:
      path: $
      code: unexpected
    message: database query error
query:
  operationName: updateBirds
  query: |
    mutation updateBirds {
      update_aves(
        _set:{name: "Long-tailed Shrike"}
        where: {id: {_eq: 1}}
      ) {
        returning {
          id
          name
        }
      }
    }
|
67
server/tests-py/test_graphql_read_only_source.py
Normal file
67
server/tests-py/test_graphql_read_only_source.py
Normal file
@ -0,0 +1,67 @@
|
||||
import pytest
|
||||
import psycopg2
|
||||
from validate import check_query_f
|
||||
from context import PytestConf
|
||||
|
||||
# Mark that all tests in this module can be run as server upgrade tests
|
||||
pytestmark = pytest.mark.allow_server_upgrade_test
|
||||
|
||||
usefixtures = pytest.mark.usefixtures
|
||||
|
||||
if not PytestConf.config.getoption('--test-read-only-source'):
|
||||
pytest.skip('--test-read-only-source flag is missing, skipping read-only tests',
|
||||
allow_module_level=True)
|
||||
|
||||
@pytest.mark.parametrize('transport', ['http', 'websocket'])
@pytest.mark.parametrize('backend', ['postgres', 'citus'])
#@pytest.mark.parametrize('backend', ['citus', 'mssql', 'postgres'])
@usefixtures('setup_schema_externally', 'per_class_tests_db_state')
class TestGraphQLOnReadOnlySource:
    """Queries against a source whose database role has SELECT-only access.

    The schema and data are created out-of-band by the
    'setup_schema_externally' fixture, since a read-only role cannot run DDL
    through HGE metadata.
    """

    setup_metadata_api_version = 'v2'

    @classmethod
    def dir(cls):
        # Directory holding the query/mutation YAML fixtures for this class.
        return 'queries/graphql_query/read_only_source'

    def test_query_aves(self, hge_ctx, transport):
        # A plain select must succeed on both http and websocket transports.
        check_query_f(hge_ctx, '{}/select_query_aves.yaml'.format(self.dir()), transport)

    # graphql-engine's websocket response is different than in http on execution
    # errors; so this test is run only on http
    def test_mutation_aves(self, hge_ctx, transport):
        check_query_f(hge_ctx, '{}/update_query_aves.yaml'.format(self.dir()), 'http')
|
||||
|
||||
|
||||
# As this is a read-only test, we can't create the schema/tables as part of the
|
||||
# HGE metadata. Hence, we create it as a separate fixture, where we execute the
|
||||
# DDLs directly on the database.
|
||||
# As this is a read-only test, we can't create the schema/tables as part of the
# HGE metadata. Hence, we create it as a separate fixture, where we execute the
# DDLs directly on the database.
@pytest.fixture(scope='class')
def setup_schema_externally(hge_ctx):
    """Create the 'aves' table directly on the database for the test class.

    Yields while the schema exists; postgres/citus teardown drops the table
    after the class finishes. Raises for unknown backends.
    """
    if hge_ctx.backend in ['postgres', 'citus']:
        conn = setup_postgres_schema(hge_ctx.pg_url)
        yield conn
        teardown_postgres_schema(conn)
    elif hge_ctx.backend == 'mssql':
        # TODO: will this be pg_url?
        setup_mssql_schema(hge_ctx.pg_url)
        # Bug fix: a generator fixture must yield exactly once on every
        # successful path; without this, pytest errors out on the mssql
        # backend with "fixture function has no 'yield' statement".
        yield None
    else:
        raise Exception('setup_schema_externally fixture was used with an unknown backend')
|
||||
|
||||
def setup_postgres_schema(conn_url):
    """Create the 'aves' table with seed data; return the open connection.

    The connection is intentionally left open and returned so that
    teardown_postgres_schema can later drop the table on the same handle.
    """
    conn = psycopg2.connect(conn_url)
    # Fix: use the cursor as a context manager so it is always closed
    # (the original leaked the cursor object).
    with conn.cursor() as cur:
        cur.execute("CREATE TABLE aves (id serial PRIMARY KEY, name TEXT);")
        cur.execute("INSERT INTO aves (name) VALUES ('Booted Eagle'), ('Hooded Merganser');")
    conn.commit()
    return conn
|
||||
|
||||
def teardown_postgres_schema(conn):
    """Drop the 'aves' table and close the connection opened during setup."""
    try:
        # Cursor context manager guarantees the cursor is closed.
        with conn.cursor() as cur:
            cur.execute("DROP TABLE aves;")
        conn.commit()
    finally:
        # Fix: close the connection even if the DROP fails, so a failing
        # teardown does not leak a database connection across test classes.
        conn.close()
|
||||
|
||||
def setup_mssql_schema(conn_url):
    """Placeholder for MSSQL schema setup; intentionally does nothing yet."""
|
Loading…
Reference in New Issue
Block a user