breaking: server logging changes (close #507, close #2171) (#1835)

This commit is contained in:
Anon Ray 2019-07-11 11:07:06 +05:30 committed by Shahidh K Muhammed
parent 5342c9b869
commit 8f1e7dbc8d
43 changed files with 1978 additions and 503 deletions

View File

@ -365,6 +365,31 @@ pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" -
kill_hge_servers
# verbose logging tests
echo -e "\n$(time_elapsed): <########## TEST GRAPHQL-ENGINE WITH QUERY LOG ########>\n"
TEST_TYPE="query-logs"
export HASURA_GRAPHQL_ENABLED_LOG_TYPES=" startup,http-log,webhook-log,websocket-log,query-log"
#run_hge_with_args serve
# we are doing this instead of calling run_hge_with_args, because we want to save in a custom log file
i=$((TIX_FILE_INDEX++))
export HPCTIXFILE="${OUTPUT_FOLDER}/hpc/graphql-engine-${i}-${TEST_TYPE}.tix"
rm -f "$HPCTIXFILE"
TIX_FILES="$TIX_FILES:$HPCTIXFILE"
set -x
export LOGGING_TEST_LOGFILE_PATH="$OUTPUT_FOLDER/graphql-engine-verbose-logging.log"
"$GRAPHQL_ENGINE" serve 2>&1 > "$LOGGING_TEST_LOGFILE_PATH" & HGE_PIDS="$HGE_PIDS $!"
set +x
wait_for_port 8080
pytest -n 1 -vv --hge-urls "$HGE_URL" --pg-urls "$HASURA_GRAPHQL_DATABASE_URL" --hge-key="$HASURA_GRAPHQL_ADMIN_SECRET" --test-logging test_logging.py
unset HASURA_GRAPHQL_ENABLED_LOG_TYPES
kill_hge_servers
# end verbose logging tests
# webhook tests

View File

@ -182,6 +182,10 @@ ul {
width: 100%;
}
#docs-content table caption {
position: relative;
}
/*** random overrides ***/
.wy-plain-list-decimal ol,

View File

@ -21,5 +21,8 @@ GraphQL engine container:
{"timestamp":"2018-10-09T11:20:32.054+0000", "level":"info", "type":"http-log", "detail":{"status":200, "query_hash":"01640c6dd131826cff44308111ed40d7fbd1cbed", "http_version":"HTTP/1.1", "query_execution_time":3.0177627e-2, "request_id":null, "url":"/v1/graphql", "user":{"x-hasura-role":"admin"}, "ip":"127.0.0.1", "response_size":209329, "method":"POST", "detail":null}}
...
**See:**
See https://docs.docker.com/config/containers/logging for more details on logging in Docker.
- https://docs.docker.com/config/containers/logging for more details on logging in Docker.
- :doc:`../logging` for more details on Hasura logs.

View File

@ -185,6 +185,17 @@ For ``serve`` sub-command these are the flags and ENV variables available:
- Set the value to ``/srv/console-assets`` for the console to load assets from the server itself
instead of CDN. *(Available for versions > v1.0.0-beta.1)*
* - ``enabled-log-types``
- ``HASURA_GRAPHQL_ENABLED_LOG_TYPES``
- Set the enabled log types. This is a comma-separated list of log-types to
enable. Default: ``startup, http-log, webhook-log, websocket-log``. See
:ref:`log types <log-types>` for more details.
* - ``log-level``
- ``HASURA_GRAPHQL_LOG_LEVEL``
- Set the logging level. Default: ``info``. Options: ``debug``, ``info``,
``warn``, ``error``.
.. note::
When the equivalent flags for environment variables are used, the flags will take precedence.

View File

@ -16,4 +16,8 @@ of Hasura GraphQL engine deployed on Heroku:
2018-10-09T11:18:21.306000+00:00 app[web.1]: {"timestamp":"2018-10-09T11:18:21.305+0000", "level":"info", "type":"http-log", "detail":{"status":200, "query_hash":"48c74f902b53a886f9ddc1b7dd12a4a6020d70c3", "http_version":"HTTP/1.1", "query_execution_time":9.477913e-3, "request_id":"b7bb6fb3-97b3-4c6f-a54a-1e0f71a190e9", "url":"/v1/graphql", "user":{"x-hasura-role":"admin"}, "ip":"171.61.77.16", "response_size":15290, "method":"POST", "detail":null}}
...
See https://devcenter.heroku.com/articles/logging for more details on logging in Heroku.
**See:**
- https://devcenter.heroku.com/articles/logging for more details on logging in Heroku.
- :doc:`../logging` for more details on Hasura logs.

View File

@ -17,4 +17,8 @@ service, i.e. ``hasura``:
...
See https://kubernetes.io/docs/concepts/cluster-administration/logging for more details on logging in Kubernetes.
**See:**
- https://kubernetes.io/docs/concepts/cluster-administration/logging for more details on logging in Kubernetes.
- :doc:`../logging` for more details on Hasura logs.

View File

@ -15,10 +15,830 @@ Based on your deployment method, Hasura GraphQL engine logs can be accessed as f
- :doc:`On Docker <docker/logging>`
- :doc:`On Kubernetes <kubernetes/logging>`
.. _log-types:
Different log-types
-------------------
The Hasura GraphQL engine has different kinds of log-types depending on the sub-system or the layer. A log-type is simply the ``type`` field in a log line, which indicates which sub-system the log comes from.
For example, the HTTP webserver logs incoming requests as access logs, and this log-type is called ``http-log``. Similarly, logs from the websocket layer are called ``websocket-log``, logs from the event trigger system are called ``event-trigger`` etc.
You can configure the GraphQL engine to enable/disable certain log-types using the ``--enabled-log-types`` flag or the ``HASURA_GRAPHQL_ENABLED_LOG_TYPES`` env var. See :doc:`../deployment/graphql-engine-flags/reference`
Default enabled log-types are: ``startup, http-log, webhook-log, websocket-log``
All the log-types that can be enabled/disabled are:
.. list-table:: Configurable log-types
:header-rows: 1
:widths: 10 25 10
* - Log type
- Description
- Log Level
* - ``startup``
- Information that is logged during startup
- ``info``
* - ``query-log``
- Logs: the entire GraphQL query with variables, generated SQL statements
(only for queries, not for mutations/subscriptions or remote schema
queries), the operation name (if provided in the GraphQL request)
- ``info``
* - ``http-log``
- Http access and error logs at the webserver layer (handling GraphQL and metadata requests)
- ``info`` and ``error``
* - ``websocket-log``
- Websocket events and error logs at the websocket server layer (handling GraphQL requests)
- ``info`` and ``error``
* - ``webhook-log``
- Logs responses and errors from the authorization webhook (if setup)
- ``info`` and ``error``
Apart from the above, there are other internal log-types which cannot be configured:
.. list-table:: Internal log-types
:header-rows: 1
:widths: 10 25 10
* - Log type
- Description
- Log Level
* - ``pg-client``
- Logs from the postgres client library
- ``warn``
* - ``metadata``
- Logs inconsistent metadata items
- ``warn``
* - ``jwk-refresh-log``
- Logs information and errors about periodic refreshing of JWK
- ``info`` and ``error``
* - ``telemetry-log``
- Logs error (if any) while sending out telemetry data
- ``info``
* - ``event-trigger``
- Logs HTTP responses from the webhook, HTTP exceptions and internal
errors
- ``info`` and ``error``
* - ``ws-server``
- Debug logs from the websocket server, mostly used internally for debugging
- ``debug``
* - ``schema-sync-thread``
- Logs internal events, when it detects schema has changed on Postgres and
when it reloads the schema
- ``info`` and ``error``
Logging levels
--------------
You can set the desired logging level on the server using the ``log-level`` flag or the ``HASURA_GRAPHQL_LOG_LEVEL`` env var. See :doc:`../deployment/graphql-engine-flags/reference`.
The default log-level is ``info``.
Setting a log-level will print all logs of priority at or above the set level. The log-level hierarchy is: ``debug > info > warn > error``
For example, setting ``--log-level=warn`` will enable all warn and error level logs only. So even if you have enabled ``query-log`` it won't be printed, as the level of ``query-log`` is ``info``.
See :ref:`log-types <log-types>` for more details on log-level of each log-type.
Log structure and metrics
-------------------------
All requests are identified by a request id. If the client sends a ``x-request-id`` header then that is used, otherwise a request id is generated for each request. This is also sent back to the client as a response header (``x-request-id``). This is useful to correlate logs from the server and the client.
**query-log** structure
^^^^^^^^^^^^^^^^^^^^^^^
On enabling verbose logging, i.e. enabling ``query-log``,
GraphQL engine will log the full GraphQL query object on each request.
It will also log the generated SQL for GraphQL queries (but not mutations and
subscriptions).
.. code-block:: json
{
"timestamp": "2019-06-03T13:25:10.915+0530",
"level": "info",
"type": "query-log",
"detail": {
"request_id": "840f952d-c489-4d21-a87a-cc23ad17926a",
"query": {
"variables": {
"limit": 10
},
"operationName": "getProfile",
"query": "query getProfile($limit: Int!) {\n profile(limit: $limit, where: {username: {_like: \"%a%\"}}) {\n username\n }\n myusername: profile (where: {username: {_eq: \"foobar\"}}) {\n username\n }\n}\n"
},
"generated_sql": {
"profile": {
"prepared_arguments": ["{\"x-hasura-role\":\"admin\"}", "%a%"],
"query": "SELECT coalesce(json_agg(\"root\" ), '[]' ) AS \"root\" FROM (SELECT row_to_json((SELECT \"_1_e\" FROM (SELECT \"_0_root.base\".\"username\" AS \"username\" ) AS \"_1_e\" ) ) AS \"root\" FROM (SELECT * FROM \"public\".\"profile\" WHERE ((\"public\".\"profile\".\"username\") LIKE ($2)) ) AS \"_0_root.base\" LIMIT 10 ) AS \"_2_root\" "
},
"myusername": {
"prepared_arguments": ["{\"x-hasura-role\":\"admin\"}", "foobar"],
"query": "SELECT coalesce(json_agg(\"root\" ), '[]' ) AS \"root\" FROM (SELECT row_to_json((SELECT \"_1_e\" FROM (SELECT \"_0_root.base\".\"username\" AS \"username\" ) AS \"_1_e\" ) ) AS \"root\" FROM (SELECT * FROM \"public\".\"profile\" WHERE ((\"public\".\"profile\".\"username\") = ($2)) ) AS \"_0_root.base\" ) AS \"_2_root\" "
}
}
}
}
The ``type`` in the log will be ``query-log``. All the details are nested
under the ``detail`` key.
This log contains 3 important fields:
- ``request_id``: A unique ID for each request. If the client sends a
``x-request-id`` header then that is respected, otherwise a UUID is generated
for each request. This is useful to correlate between ``http-log`` and
``query-log``.
- ``query``: Contains the full GraphQL request including the variables and
operation name.
- ``generated_sql``: this contains the generated SQL for GraphQL queries. For
mutations and subscriptions this field will be ``null``.
**http-log** structure
^^^^^^^^^^^^^^^^^^^^^^
This is what the HTTP access logs look like:
- On success response:
.. code-block:: json
{
"timestamp": "2019-05-30T23:40:24.654+0530",
"level": "info",
"type": "http-log",
"detail": {
"operation": {
"query_execution_time": 0.009240042,
"user_vars": {
"x-hasura-role": "user"
},
"error": null,
"request_id": "072b3617-6653-4fd5-b5ee-580e9d098c3d",
"response_size": 105,
"query": null
},
"http_info": {
"status": 200,
"http_version": "HTTP/1.1",
"url": "/v1/graphql",
"ip": "127.0.0.1",
"method": "POST"
}
}
}
- On error response:
.. code-block:: json
{
"timestamp": "2019-05-29T15:22:37.834+0530",
"level": "info",
"type": "http-log",
"detail": {
"operation": {
"query_execution_time": 0.000656144,
"user_vars": {
"x-hasura-role": "user",
"x-hasura-user-id": "1"
},
"error": {
"path": "$.selectionSet.profile.selectionSet.usernamex",
"error": "field \"usernamex\" not found in type: 'profile'",
"code": "validation-failed"
},
"request_id": "072b3617-6653-4fd5-b5ee-580e9d098c3d",
"response_size": 142,
"query": {
"variables": {
"limit": 10
},
"operationName": "getProfile",
"query": "query getProfile($limit: Int!) { profile(limit: $limit, where:{username: {_like: \"%a%\"}}) { usernamex} }"
}
},
"http_info": {
"status": 200,
"http_version": "HTTP/1.1",
"url": "/v1/graphql",
"ip": "127.0.0.1",
"method": "POST"
}
}
The ``type`` in the log will be ``http-log`` for HTTP access/error log. This
log contains basic information about the HTTP request and the GraphQL operation.
It has two important "keys" under the ``detail`` section - ``operation`` and ``http_info``.
``http_info`` lists various information regarding the HTTP request, e.g. IP
address, URL path, HTTP status code etc.
``operation`` lists various information regarding the GraphQL query/operation.
- ``query_execution_time``: the time taken to parse the GraphQL query (from JSON
request), compile it to SQL with permissions and user session variables, and
then execute it and fetch the results back from Postgres. The unit is in
seconds.
- ``user_vars``: contains the user session variables, or the ``x-hasura-*``
session variables inferred from the authorization mode.
- ``request_id``: A unique ID for each request. If the client sends a
``x-request-id`` header then that is respected, otherwise a UUID is generated
for each request.
- ``response_size``: Size of the response in bytes.
- ``error``: *optional*. Will contain the error object when there is an error,
otherwise this will be ``null``. This key can be used to detect if there is an
error in the request. The status code for error requests will be ``200`` on
the ``v1/graphql`` endpoint.
- ``query``: *optional*. This will contain the GraphQL query object only when
there is an error. On successful response this will be ``null``.
**websocket-log** structure
^^^^^^^^^^^^^^^^^^^^^^^^^^^
This is what the Websocket logs look like:
- On successful operation start:
.. code-block:: json
{
"timestamp": "2019-06-10T10:52:54.247+0530",
"level": "info",
"type": "websocket-log",
"detail": {
"event": {
"type": "operation",
"detail": {
"request_id": "d2ede87d-5cb7-44b6-8736-1d898117722a",
"operation_id": "1",
"query": {
"variables": {},
"query": "subscription {\n author {\n name\n }\n}\n"
},
"operation_type": {
"type": "started"
},
"operation_name": null
}
},
"connection_info": {
"websocket_id": "f590dd18-75db-4602-8693-8150239df7f7",
"jwt_expiry": null,
"msg": null
},
"user_vars": {
"x-hasura-role": "admin"
}
}
}
- On operation stop:
.. code-block:: json
{
"timestamp": "2019-06-10T11:01:40.939+0530",
"level": "info",
"type": "websocket-log",
"detail": {
"event": {
"type": "operation",
"detail": {
"request_id": null,
"operation_id": "1",
"query": null,
"operation_type": {
"type": "stopped"
},
"operation_name": null
}
},
"connection_info": {
"websocket_id": "7f782190-fd58-4305-a83f-8e17177b204e",
"jwt_expiry": null,
"msg": null
},
"user_vars": {
"x-hasura-role": "admin"
}
}
}
- On error:
.. code-block:: json
{
"timestamp": "2019-06-10T10:55:20.650+0530",
"level": "info",
"type": "websocket-log",
"detail": {
"event": {
"type": "operation",
"detail": {
"request_id": "150e3e6a-e1a7-46ba-a9d4-da6b192a4005",
"operation_id": "1",
"query": {
"variables": {},
"query": "subscription {\n author {\n namex\n }\n}\n"
},
"operation_type": {
"type": "query_err",
"detail": {
"path": "$.selectionSet.author.selectionSet.namex",
"error": "field \"namex\" not found in type: 'author'",
"code": "validation-failed"
}
},
"operation_name": null
}
},
"connection_info": {
"websocket_id": "49932ddf-e54d-42c6-bffb-8a57a1c6dcbe",
"jwt_expiry": null,
"msg": null
},
"user_vars": {
"x-hasura-role": "admin"
}
}
}
Monitoring frameworks
---------------------
You can integrate the logs emitted by Hasura GraphQL with external monitoring tools for better visibility as per
your convenience.
For some examples, see :doc:`../guides/monitoring/index`
Migration path of logs from (<= **v1.0.0-beta.2** to newer)
-----------------------------------------------------------
Previously, there were two main kinds of logs for every request - ``http-log`` and ``ws-handler``
for HTTP and websockets respectively. (The other logs being, logs during startup, event-trigger
logs, schema-sync logs, jwk-refresh logs etc.).
The structure of the **http-log** has changed
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Summary of the changes
++++++++++++++++++++++
.. list-table:: **http-log** changes
:header-rows: 1
* - Older
- Newer
* - ``detail.status``
- ``detail.http_info.status``
* - ``detail.http_version``
- ``detail.http_info.http_version``
* - ``detail.method``
- ``detail.http_info.method``
* - ``detail.url``
- ``detail.http_info.url``
* - ``detail.ip``
- ``detail.http_info.ip``
* - ``detail.query_hash``
- removed
* - ``detail.query_execution_time``
- ``detail.operation.query_execution_time``
* - ``detail.request_id``
- ``detail.operation.request_id``
* - ``detail.response_size``
- ``detail.operation.response_size``
* - ``detail.user``
- ``detail.operation.user_vars``
* - ``detail.detail.error`` (only on error)
- ``detail.operation.error`` (only on error)
* - ``detail.detail.request`` (only on error)
- ``detail.operation.query`` (only on error)
Full example logs
+++++++++++++++++
Older, on success:
.. code-block:: json
{
"timestamp": "2019-06-07T12:04:16.713+0000",
"level": "info",
"type": "http-log",
"detail": {
"status": 200,
"query_hash": "e9006e6750ebaa77da775ae4fc60227d3101b03e",
"http_version": "HTTP/1.1",
"query_execution_time": 0.408548571,
"request_id": "1ad0c61b-1431-410e-818e-99b57822bd2b",
"url": "/v1/graphql",
"ip": "106.51.72.39",
"response_size": 204,
"user": {
"x-hasura-role": "admin"
},
"method": "POST",
"detail": null
}
}
Newer, on success:
.. code-block:: json
{
"timestamp": "2019-05-30T23:40:24.654+0530",
"level": "info",
"type": "http-log",
"detail": {
"operation": {
"query_execution_time": 0.009240042,
"user_vars": {
"x-hasura-role": "user"
},
"request_id": "072b3617-6653-4fd5-b5ee-580e9d098c3d",
"response_size": 105,
"error": null,
"query": null
},
"http_info": {
"status": 200,
"http_version": "HTTP/1.1",
"url": "/v1/graphql",
"ip": "127.0.0.1",
"method": "POST"
}
}
}
Older, on error:
.. code-block:: json
{
"timestamp": "2019-06-07T12:24:05.166+0000",
"level": "info",
"type": "http-log",
"detail": {
"status": 200,
"query_hash": "511894cc797a2b5cef1c84f106a038ea7bc8436d",
"http_version": "HTTP/1.1",
"query_execution_time": 2.34687e-4,
"request_id": "02d695c7-8a2d-4a45-84dd-8b61b7255807",
"url": "/v1/graphql",
"ip": "106.51.72.39",
"response_size": 138,
"user": {
"x-hasura-role": "admin"
},
"method": "POST",
"detail": {
"error": {
"path": "$.selectionSet.todo.selectionSet.completedx",
"error": "field \"completedx\" not found in type: 'todo'",
"code": "validation-failed"
},
"request": "{\"query\":\"query {\\n todo {\\n id\\n title\\n completedx\\n }\\n}\",\"variables\":null}"
}
}
}
Newer, on error:
.. code-block:: json
{
"timestamp": "2019-05-29T15:22:37.834+0530",
"level": "info",
"type": "http-log",
"detail": {
"operation": {
"query_execution_time": 0.000656144,
"user_vars": {
"x-hasura-role": "user",
"x-hasura-user-id": "1"
},
"error": {
"path": "$.selectionSet.profile.selectionSet.usernamex",
"error": "field \"usernamex\" not found in type: 'profile'",
"code": "validation-failed"
},
"request_id": "072b3617-6653-4fd5-b5ee-580e9d098c3d",
"response_size": 142,
"query": {
"variables": {
"limit": 10
},
"operationName": "getProfile",
"query": "query getProfile($limit: Int!) { profile(limit: $limit, where:{username: {_like: \"%a%\"}}) { usernamex} }"
}
},
"http_info": {
"status": 200,
"http_version": "HTTP/1.1",
"url": "/v1/graphql",
"ip": "127.0.0.1",
"method": "POST"
}
}
The structure for **ws-handler** has changed, and has been renamed to **websocket-log**
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Summary of the changes
++++++++++++++++++++++
.. list-table:: **websocket-log** changes
:header-rows: 1
* - Older
- Newer
* - ``detail.websocket_id``
- ``detail.connection_info.websocket_id``
* - ``detail.jwt_expiry``
- ``detail.connection_info.jwt_expiry``
* - ``detail.msg``
- ``detail.connection_info.msg``
* - ``detail.user``
- ``detail.user_vars``
* - ``detail.event.detail``:
.. code-block:: json
[
"1",
null,
{
"type": "started"
}
]
- ``detail.event.detail``:
.. code-block:: json
{
"request_id": "d2ede87d-5cb7-44b6-8736-1d898117722a",
"operation_id": "1",
"operation_type": {
"type": "started"
},
"operation_name": null
}
* - ``detail.event.detail`` (on error):
.. code-block:: json
[
"1",
null,
{
"type": "query_err",
"detail": {
"path": "$.selectionSet.todo.selectionSet.titlex",
"error": "field \"titlex\" not found in type: 'todo'",
"code": "validation-failed"
}
}
]
- ``detail.event.detail`` (on error):
.. code-block:: json
{
"request_id": "150e3e6a-e1a7-46ba-a9d4-da6b192a4005",
"operation_id": "1",
"query": {
"variables": {},
"query": "subscription {\n author {\n namex\n }\n}\n"
},
"operation_type": {
"type": "query_err",
"detail": {
"path": "$.selectionSet.author.selectionSet.namex",
"error": "field \"namex\" not found in type: 'author'",
"code": "validation-failed"
}
},
"operation_name": null
}
Full example logs
+++++++++++++++++
Older, on success:
.. code-block:: json
{
"timestamp": "2019-06-07T12:35:40.652+0000",
"level": "info",
"type": "ws-handler",
"detail": {
"event": {
"type": "operation",
"detail": ["1", null, {
"type": "started"
}]
},
"websocket_id": "11dea559-6554-4598-969a-00b48545950f",
"jwt_expiry": null,
"msg": null,
"user": {
"x-hasura-role": "admin"
}
}
}
Newer, on success:
.. code-block:: json
{
"timestamp": "2019-06-10T10:52:54.247+0530",
"level": "info",
"type": "websocket-log",
"detail": {
"event": {
"type": "operation",
"detail": {
"request_id": "d2ede87d-5cb7-44b6-8736-1d898117722a",
"operation_id": "1",
"query": {
"variables": {},
"query": "subscription {\n author {\n name\n }\n}\n"
},
"operation_type": {
"type": "started"
},
"operation_name": null
}
},
"connection_info": {
"websocket_id": "f590dd18-75db-4602-8693-8150239df7f7",
"jwt_expiry": null,
"msg": null
},
"user_vars": {
"x-hasura-role": "admin"
}
}
}
Older, when operation stops:
.. code-block:: json
{
"timestamp": "2019-06-10T05:30:41.432+0000",
"level": "info",
"type": "ws-handler",
"detail": {
"event": {
"type": "operation",
"detail": ["1", null, {
"type": "stopped"
}]
},
"websocket_id": "3f5721ee-1bc6-424c-841f-8ff8a326d9ef",
"jwt_expiry": null,
"msg": null,
"user": {
"x-hasura-role": "admin"
}
}
}
Newer, when an operation stops:
.. code-block:: json
{
"timestamp": "2019-06-10T11:01:40.939+0530",
"level": "info",
"type": "websocket-log",
"detail": {
"event": {
"type": "operation",
"detail": {
"request_id": null,
"operation_id": "1",
"query": null,
"operation_type": {
"type": "stopped"
},
"operation_name": null
}
},
"connection_info": {
"websocket_id": "7f782190-fd58-4305-a83f-8e17177b204e",
"jwt_expiry": null,
"msg": null
},
"user_vars": {
"x-hasura-role": "admin"
}
}
}
Older, on error:
.. code-block:: json
{
"timestamp": "2019-06-07T12:38:07.188+0000",
"level": "info",
"type": "ws-handler",
"detail": {
"event": {
"type": "operation",
"detail": ["1", null, {
"type": "query_err",
"detail": {
"path": "$.selectionSet.todo.selectionSet.titlex",
"error": "field \"titlex\" not found in type: 'todo'",
"code": "validation-failed"
}
}]
},
"websocket_id": "77558d9b-99f8-4c6a-b105-a5b08c96543b",
"jwt_expiry": null,
"msg": null,
"user": {
"x-hasura-role": "admin"
}
}
}
Newer, on error:
.. code-block:: json
{
"timestamp": "2019-06-10T10:55:20.650+0530",
"level": "info",
"type": "websocket-log",
"detail": {
"event": {
"type": "operation",
"detail": {
"request_id": "150e3e6a-e1a7-46ba-a9d4-da6b192a4005",
"operation_id": "1",
"query": {
"variables": {},
"query": "subscription {\n author {\n namex\n }\n}\n"
},
"operation_type": {
"type": "query_err",
"detail": {
"path": "$.selectionSet.author.selectionSet.namex",
"error": "field \"namex\" not found in type: 'author'",
"code": "validation-failed"
}
},
"operation_name": null
}
},
"connection_info": {
"websocket_id": "49932ddf-e54d-42c6-bffb-8a57a1c6dcbe",
"jwt_expiry": null,
"msg": null
},
"user_vars": {
"x-hasura-role": "admin"
}
}
}

1
server/.gitignore vendored
View File

@ -21,6 +21,7 @@ cabal.config
*.hp
TAGS
.stack-work*
*.log
# ws related
ws/

View File

@ -10,7 +10,6 @@ packager_ver := 20190326
pg_dump_ver := 11
project_dir := $(shell pwd)
build_dir := $(project_dir)/$(shell stack path --dist-dir)/build
build_dir_docker := $(project_dir)/$(shell stack --docker path --dist-dir)/build
build_output := /build/_server_output
@ -33,6 +32,7 @@ image: $(project).cabal
.
local-image:
$(eval build_dir_docker := $(project_dir)/$(shell stack --docker path --dist-dir)/build)
stack docker pull
stack --docker build --fast
mkdir -p packaging/build/rootfs

View File

@ -69,8 +69,9 @@ library
, network
, byteorder
-- hashing for logging
-- for parsing RSA keys
, cryptonite
-- for jwt verification
, jose
, pem
@ -242,6 +243,7 @@ library
, Hasura.GraphQL.RemoteServer
, Hasura.GraphQL.Context
, Hasura.GraphQL.Resolve.ContextTypes
, Hasura.GraphQL.Logging
, Hasura.Events.Lib
, Hasura.Events.HTTP

View File

@ -1,4 +1,3 @@
{-# LANGUAGE TypeApplications #-}
module Main where
import Migrate (migrateCatalog)
@ -39,6 +38,7 @@ import Hasura.Server.Logging
import Hasura.Server.Query (peelRun)
import Hasura.Server.SchemaUpdate
import Hasura.Server.Telemetry
import Hasura.Server.Utils
import Hasura.Server.Version (currentVersion)
import qualified Database.PG.Query as Q
@ -86,6 +86,8 @@ parseHGECommand =
<*> parseMxBatchSize
<*> parseFallbackRefetchInt
<*> parseEnableAllowlist
<*> parseEnabledLogs
<*> parseLogLevel
parseArgs :: IO HGEOptions
@ -117,20 +119,21 @@ main = do
(HGEOptionsG rci hgeCmd) <- parseArgs
-- global http manager
httpManager <- HTTP.newManager HTTP.tlsManagerSettings
loggerCtx <- mkLoggerCtx $ defaultLoggerSettings True
instanceId <- mkInstanceId
let logger = mkLogger loggerCtx
pgLogger = mkPGLogger logger
instanceId <- mkInstanceId
case hgeCmd of
HCServe so@(ServeOptions port host cp isoL mAdminSecret mAuthHook
mJwtSecret mUnAuthRole corsCfg enableConsole consoleAssetsDir
enableTelemetry strfyNum enabledAPIs lqOpts enableAL) -> do
enableTelemetry strfyNum enabledAPIs lqOpts enableAL
enabledLogs serverLogLevel) -> do
let sqlGenCtx = SQLGenCtx strfyNum
(loggerCtx, logger, pgLogger) <- mkLoggers enabledLogs serverLogLevel
initTime <- Clock.getCurrentTime
-- log serve options
unLogger logger $ serveOptsToLog so
hloggerCtx <- mkLoggerCtx $ defaultLoggerSettings False
hloggerCtx <- mkLoggerCtx (defaultLoggerSettings False serverLogLevel) enabledLogs
authModeRes <- runExceptT $ mkAuthMode mAdminSecret mAuthHook mJwtSecret
mUnAuthRole httpManager loggerCtx
@ -170,7 +173,7 @@ main = do
eventEngineCtx <- atomically $ initEventEngineCtx maxEvThrds evFetchMilliSec
let scRef = _scrCache cacheRef
unLogger logger $
mkGenericStrLog "event_triggers" "starting workers"
mkGenericStrLog LevelInfo "event_triggers" "starting workers"
void $ C.forkIO $ processEventQueue hloggerCtx logEnvHeaders
httpManager pool scRef eventEngineCtx
@ -179,27 +182,29 @@ main = do
-- start a background thread for telemetry
when enableTelemetry $ do
unLogger logger $ mkGenericStrLog "telemetry" telemetryNotice
unLogger logger $ mkGenericStrLog LevelInfo "telemetry" telemetryNotice
void $ C.forkIO $ runTelemetry logger httpManager scRef initRes
finishTime <- Clock.getCurrentTime
let apiInitTime = realToFrac $ Clock.diffUTCTime finishTime initTime
unLogger logger $
mkGenericStrLog "server" $
"starting API server, took " <> show @Double apiInitTime <> "s"
unLogger logger $ mkGenericLog LevelInfo "server" $
StartupTimeInfo "starting API server" apiInitTime
Warp.runSettings warpSettings app
HCExport -> do
(_, _, pgLogger) <- mkLoggers defaultEnabledLogTypes LevelInfo
ci <- procConnInfo rci
res <- runTx' pgLogger ci fetchMetadata
either printErrJExit printJSON res
HCClean -> do
(_, _, pgLogger) <- mkLoggers defaultEnabledLogTypes LevelInfo
ci <- procConnInfo rci
res <- runTx' pgLogger ci cleanCatalog
either printErrJExit (const cleanSuccess) res
HCExecute -> do
(_, _, pgLogger) <- mkLoggers defaultEnabledLogTypes LevelInfo
queryBs <- BL.getContents
ci <- procConnInfo rci
let sqlGenCtx = SQLGenCtx False
@ -210,6 +215,12 @@ main = do
HCVersion -> putStrLn $ "Hasura GraphQL Engine: " ++ T.unpack currentVersion
where
mkLoggers enabledLogs logLevel = do
loggerCtx <- mkLoggerCtx (defaultLoggerSettings True logLevel) enabledLogs
let logger = mkLogger loggerCtx
pgLogger = mkPGLogger logger
return (loggerCtx, logger, pgLogger)
runTx pool tx =
runExceptT $ Q.runTx pool (Q.Serializable, Nothing) tx
@ -235,18 +246,18 @@ main = do
-- initialise the catalog
initRes <- runAsAdmin pool sqlGenCtx httpMgr $
initCatalogSafe currentTime
either printErrJExit (logger . mkGenericStrLog "db_init") initRes
either printErrJExit (logger . mkGenericStrLog LevelInfo "db_init") initRes
-- migrate catalog if necessary
migRes <- runAsAdmin pool sqlGenCtx httpMgr $
migrateCatalog currentTime
either printErrJExit (logger . mkGenericStrLog "db_migrate") migRes
either printErrJExit (logger . mkGenericStrLog LevelInfo "db_migrate") migRes
-- generate and retrieve uuids
getUniqIds pool
prepareEvents pool (Logger logger) = do
logger $ mkGenericStrLog "event_triggers" "preparing data"
logger $ mkGenericStrLog LevelInfo "event_triggers" "preparing data"
res <- runTx pool unlockAllEvents
either printErrJExit return res

View File

@ -9,24 +9,25 @@ module Hasura.Events.HTTP
, ExtraContext(..)
) where
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy as B
import qualified Data.CaseInsensitive as CI
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy as B
import qualified Data.CaseInsensitive as CI
import Data.Either
import qualified Data.TByteString as TBS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified Data.Time.Clock as Time
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
import qualified System.Log.FastLogger as FL
import qualified Data.HashSet as Set
import qualified Data.TByteString as TBS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified Data.Time.Clock as Time
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
import qualified System.Log.FastLogger as FL
import Control.Exception (try)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader)
import Control.Exception (try)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Reader (MonadReader)
import Data.Has
import Hasura.Logging
import Hasura.Prelude
@ -53,7 +54,7 @@ data HTTPResp
$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''HTTPResp)
instance ToEngineLog HTTPResp where
toEngineLog resp = (LevelInfo, "event-trigger", J.toJSON resp )
toEngineLog resp = (LevelInfo, ELTEventTrigger, J.toJSON resp )
mkHTTPResp :: HTTP.Response B.ByteString -> HTTPResp
mkHTTPResp resp =
@ -76,7 +77,7 @@ data HTTPRespExtra
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''HTTPRespExtra)
instance ToEngineLog HTTPRespExtra where
toEngineLog resp = (LevelInfo, "event-trigger", J.toJSON resp )
toEngineLog resp = (LevelInfo, ELTEventTrigger, J.toJSON resp )
data HTTPErr
= HClient !HTTP.HttpException
@ -101,7 +102,7 @@ instance J.ToJSON HTTPErr where
, "detail" J..= v]
-- encapsulates a http operation
instance ToEngineLog HTTPErr where
toEngineLog err = (LevelError, "event-trigger", J.toJSON err )
toEngineLog err = (LevelError, ELTEventTrigger, J.toJSON err )
isNetworkError :: HTTPErr -> Bool
isNetworkError = \case
@ -136,7 +137,7 @@ data HTTPReq
$(J.deriveJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''HTTPReq)
instance ToEngineLog HTTPReq where
toEngineLog req = (LevelInfo, "event-trigger", J.toJSON req )
toEngineLog req = (LevelInfo, ELTEventTrigger, J.toJSON req )
runHTTP
:: ( MonadReader r m
@ -155,8 +156,8 @@ runHTTP req exLog = do
return $ either (Left . HClient) anyBodyParser res
mkHLogger :: LoggerCtx -> HLogger
mkHLogger (LoggerCtx loggerSet serverLogLevel timeGetter) (logLevel, logTy, logDet) = do
mkHLogger (LoggerCtx loggerSet serverLogLevel timeGetter enabledLogs) (logLevel, logTy, logDet) = do
localTime <- timeGetter
when (logLevel >= serverLogLevel) $
when (logLevel >= serverLogLevel && logTy `Set.member` enabledLogs) $
FL.pushLogStrLn loggerSet $ FL.toLogStr $
J.encode $ EngineLog localTime logLevel logTy logDet

View File

@ -56,7 +56,7 @@ newtype EventInternalErr
deriving (Show, Eq)
instance L.ToEngineLog EventInternalErr where
toEngineLog (EventInternalErr qerr) = (L.LevelError, "event-trigger", toJSON qerr )
toEngineLog (EventInternalErr qerr) = (L.LevelError, L.ELTEventTrigger, toJSON qerr )
data TriggerMeta
= TriggerMeta { tmName :: TriggerName }

View File

@ -13,6 +13,8 @@ module Hasura.GraphQL.Execute
, EP.initPlanCache
, EP.clearPlanCache
, EP.dumpPlanCache
, ExecutionCtx(..)
) where
import Control.Exception (try)
@ -20,7 +22,6 @@ import Control.Lens
import Data.Has
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.CaseInsensitive as CI
import qualified Data.HashMap.Strict as Map
import qualified Data.HashSet as Set
@ -33,6 +34,7 @@ import qualified Network.Wreq as Wreq
import Hasura.EncJSON
import Hasura.GraphQL.Context
import Hasura.GraphQL.Logging
import Hasura.GraphQL.Resolve.Context
import Hasura.GraphQL.Schema
import Hasura.GraphQL.Transport.HTTP.Protocol
@ -42,16 +44,16 @@ import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types
import Hasura.Server.Context
import Hasura.Server.Utils (bsToTxt,
import Hasura.Server.Utils (RequestId,
filterRequestHeaders)
import qualified Hasura.GraphQL.Execute.LiveQuery as EL
import qualified Hasura.GraphQL.Execute.Plan as EP
import qualified Hasura.GraphQL.Execute.Query as EQ
import qualified Hasura.GraphQL.Resolve as GR
import qualified Hasura.GraphQL.Validate as VQ
import qualified Hasura.GraphQL.Validate.Types as VT
import qualified Hasura.Logging as L
-- The current execution plan of a graphql operation, it is
-- currently, either local pg execution or a remote execution
@ -63,6 +65,19 @@ data GQExecPlan a
| GExPRemote !RemoteSchemaInfo !G.TypedOperationDefinition
deriving (Functor, Foldable, Traversable)
-- | Execution context: the read-only environment threaded through GraphQL
-- request execution (used by both the HTTP and websocket transports).
data ExecutionCtx
  = ExecutionCtx
  { _ecxLogger          :: !L.Logger       -- ^ structured logger, used for the query-log
  , _ecxSqlGenCtx       :: !SQLGenCtx      -- ^ SQL generation settings
  , _ecxPgExecCtx       :: !PGExecCtx      -- ^ Postgres execution context
  , _ecxPlanCache       :: !EP.PlanCache   -- ^ cache of reusable query plans
  , _ecxSchemaCache     :: !SchemaCache    -- ^ the current schema cache
  , _ecxSchemaCacheVer  :: !SchemaCacheVer -- ^ version of the schema cache
  , _ecxHttpManager     :: !HTTP.Manager   -- ^ HTTP manager, used when executing against remote schemas
  , _ecxEnableAllowList :: !Bool           -- ^ whether the query allow-list is enforced
  }
-- Enforces the current limitation
assertSameLocationNodes
:: (MonadError QErr m) => [VT.TypeLoc] -> m VT.TypeLoc
@ -145,11 +160,12 @@ getExecPlanPartial userInfo sc enableAL req = do
let msg = "query is not in any of the allowlists"
in e{qeInternal = Just $ J.object [ "message" J..= J.String msg]}
-- An execution operation, in case of
-- queries and mutations it is just a transaction
-- to be executed
data ExecOp
= ExOpQuery !LazyRespTx
= ExOpQuery !LazyRespTx !(Maybe EQ.GeneratedSqlMap)
| ExOpMutation !LazyRespTx
| ExOpSubs !EL.LiveQueryOp
@ -176,8 +192,9 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
case planM of
-- plans are only for queries and subscriptions
Just plan -> GExPHasura <$> case plan of
EP.RPQuery queryPlan ->
ExOpQuery <$> EQ.queryOpFromPlan usrVars queryVars queryPlan
EP.RPQuery queryPlan -> do
(tx, genSql) <- EQ.queryOpFromPlan usrVars queryVars queryPlan
return $ ExOpQuery tx (Just genSql)
EP.RPSubs subsPlan ->
ExOpSubs <$> EL.subsOpFromPlan pgExecCtx usrVars queryVars subsPlan
Nothing -> noExistingPlan
@ -194,10 +211,10 @@ getResolvedExecPlan pgExecCtx planCache userInfo sqlGenCtx
VQ.RMutation selSet ->
ExOpMutation <$> getMutOp gCtx sqlGenCtx userInfo selSet
VQ.RQuery selSet -> do
(queryTx, planM) <- getQueryOp gCtx sqlGenCtx
userInfo selSet varDefs
(queryTx, planM, genSql) <- getQueryOp gCtx sqlGenCtx
userInfo selSet varDefs
mapM_ (addPlanToCache . EP.RPQuery) planM
return $ ExOpQuery queryTx
return $ ExOpQuery queryTx (Just genSql)
VQ.RSubscription fld -> do
(lqOp, planM) <- getSubsOp pgExecCtx gCtx sqlGenCtx
userInfo reqUnparsed varDefs fld
@ -240,7 +257,7 @@ getQueryOp
-> UserInfo
-> VQ.SelSet
-> [G.VariableDefinition]
-> m (LazyRespTx, Maybe EQ.ReusableQueryPlan)
-> m (LazyRespTx, Maybe EQ.ReusableQueryPlan, EQ.GeneratedSqlMap)
getQueryOp gCtx sqlGenCtx userInfo fields varDefs =
runE gCtx sqlGenCtx userInfo $ EQ.convertQuerySelSet varDefs fields
@ -327,17 +344,22 @@ getSubsOp pgExecCtx gCtx sqlGenCtx userInfo req varDefs fld =
runE gCtx sqlGenCtx userInfo $ getSubsOpM pgExecCtx req varDefs fld
execRemoteGQ
:: (MonadIO m, MonadError QErr m)
=> HTTP.Manager
:: ( MonadIO m
, MonadError QErr m
, MonadReader ExecutionCtx m
)
=> RequestId
-> UserInfo
-> [N.Header]
-> BL.ByteString
-- ^ the raw request string
-> GQLReqUnparsed
-> RemoteSchemaInfo
-> G.TypedOperationDefinition
-> m (HttpResponse EncJSON)
execRemoteGQ manager userInfo reqHdrs q rsi opDef = do
let opTy = G._todType opDef
execRemoteGQ reqId userInfo reqHdrs q rsi opDef = do
execCtx <- ask
let logger = _ecxLogger execCtx
manager = _ecxHttpManager execCtx
opTy = G._todType opDef
when (opTy == G.OperationTypeSubscription) $
throw400 NotSupported "subscription to remote server is not supported"
hdrs <- getHeadersFromConf hdrConf
@ -352,7 +374,9 @@ execRemoteGQ manager userInfo reqHdrs q rsi opDef = do
finalHdrs = foldr Map.union Map.empty hdrMaps
options = wreqOptions manager (Map.toList finalHdrs)
res <- liftIO $ try $ Wreq.postWith options (show url) q
-- log the graphql query
liftIO $ logGraphqlQuery logger $ QueryLog q Nothing reqId
res <- liftIO $ try $ Wreq.postWith options (show url) (J.toJSON q)
resp <- either httpThrow return res
let cookieHdr = getCookieHdr (resp ^? Wreq.responseHeader "Set-Cookie")
respHdrs = Just $ mkRespHeaders cookieHdr

View File

@ -58,6 +58,13 @@ data LQOpts
, _loFallbackOpts :: LQF.FallbackOpts
} deriving (Show, Eq)
-- | Required for logging server configuration on startup: serializes both
-- the multiplexed and the fallback live-query options.
instance J.ToJSON LQOpts where
  toJSON (LQOpts mxOpts fbOpts) =
    J.object [ "multiplexed_options" J..= mxOpts
             , "fallback_options" J..= fbOpts
             ]
-- | Smart-constructor alias for 'LQOpts'.
mkLQOpts :: LQM.MxOpts -> LQF.FallbackOpts -> LQOpts
mkLQOpts = LQOpts

View File

@ -2,6 +2,8 @@ module Hasura.GraphQL.Execute.Query
( convertQuerySelSet
, queryOpFromPlan
, ReusableQueryPlan
, GeneratedSqlMap
, PreparedSql(..)
) where
import Data.Has
@ -106,11 +108,12 @@ getReusablePlan (QueryPlan vars fldPlans) =
varTypes = Map.unions $ map (varTypesOfPlan . snd) fldPlans
withPlan
:: UserVars -> PGPlan -> GV.AnnPGVarVals -> RespTx
:: (MonadError QErr m)
=> UserVars -> PGPlan -> GV.AnnPGVarVals -> m PreparedSql
withPlan usrVars (PGPlan q reqVars prepMap) annVars = do
prepMap' <- foldM getVar prepMap (Map.toList reqVars)
let args = withUserVars usrVars $ IntMap.elems prepMap'
asSingleRowJsonResp q args
return $ PreparedSql q args
where
getVar accum (var, (prepNo, _)) = do
let varName = G.unName $ G.unVariable var
@ -121,18 +124,21 @@ withPlan usrVars (PGPlan q reqVars prepMap) annVars = do
-- turn the current plan into a transaction
mkCurPlanTx
:: UserVars
:: (MonadError QErr m)
=> UserVars
-> QueryPlan
-> LazyRespTx
mkCurPlanTx usrVars (QueryPlan _ fldPlans) =
fmap encJFromAssocList $ forM fldPlans $ \(alias, fldPlan) -> do
-> m (LazyRespTx, GeneratedSqlMap)
mkCurPlanTx usrVars (QueryPlan _ fldPlans) = do
-- generate the SQL and prepared vars or the bytestring
resolved <- forM fldPlans $ \(alias, fldPlan) -> do
fldResp <- case fldPlan of
RFPRaw resp -> return $ encJFromBS resp
RFPPostgres pgPlan -> liftTx $ planTx pgPlan
return (G.unName $ G.unAlias alias, fldResp)
where
planTx (PGPlan q _ prepMap) =
asSingleRowJsonResp q $ withUserVars usrVars $ IntMap.elems prepMap
RFPRaw resp -> return $ RRRaw resp
RFPPostgres (PGPlan q _ prepMap) -> do
let args = withUserVars usrVars $ IntMap.elems prepMap
return $ RRSql $ PreparedSql q args
return (alias, fldResp)
return (mkLazyRespTx resolved, mkGeneratedSqlMap resolved)
withUserVars :: UserVars -> [Q.PrepArg] -> [Q.PrepArg]
withUserVars usrVars l =
@ -211,7 +217,7 @@ convertQuerySelSet
)
=> [G.VariableDefinition]
-> V.SelSet
-> m (LazyRespTx, Maybe ReusableQueryPlan)
-> m (LazyRespTx, Maybe ReusableQueryPlan, GeneratedSqlMap)
convertQuerySelSet varDefs fields = do
usrVars <- asks (userVars . getter)
fldPlans <- forM (toList fields) $ \fld -> do
@ -228,7 +234,8 @@ convertQuerySelSet varDefs fields = do
return (V._fAlias fld, fldPlan)
let queryPlan = QueryPlan varDefs fldPlans
reusablePlanM = getReusablePlan queryPlan
return (mkCurPlanTx usrVars queryPlan, reusablePlanM)
(tx, sql) <- mkCurPlanTx usrVars queryPlan
return (tx, reusablePlanM, sql)
-- use the existing plan and new variables to create a pg query
queryOpFromPlan
@ -236,12 +243,58 @@ queryOpFromPlan
=> UserVars
-> Maybe GH.VariableValues
-> ReusableQueryPlan
-> m LazyRespTx
-> m (LazyRespTx, GeneratedSqlMap)
queryOpFromPlan usrVars varValsM (ReusableQueryPlan varTypes fldPlans) = do
validatedVars <- GV.getAnnPGVarVals varTypes varValsM
let tx = fmap encJFromAssocList $ forM fldPlans $ \(alias, fldPlan) -> do
fldResp <- case fldPlan of
RFPRaw resp -> return $ encJFromBS resp
RFPPostgres pgPlan -> liftTx $ withPlan usrVars pgPlan validatedVars
return (G.unName $ G.unAlias alias, fldResp)
return tx
-- generate the SQL and prepared vars or the bytestring
resolved <- forM fldPlans $ \(alias, fldPlan) ->
(alias,) <$> case fldPlan of
RFPRaw resp -> return $ RRRaw resp
RFPPostgres pgPlan -> RRSql <$> withPlan usrVars pgPlan validatedVars
return (mkLazyRespTx resolved, mkGeneratedSqlMap resolved)
data PreparedSql
  = PreparedSql
  { _psQuery    :: !Q.Query     -- ^ the SQL text of the statement
  , _psPrepArgs :: ![Q.PrepArg] -- ^ the positional prepared arguments
  }

-- | Required to log in `query-log`: renders the SQL text together with its
-- prepared arguments as a JSON object.
instance J.ToJSON PreparedSql where
  toJSON (PreparedSql q prepArgs) =
    J.object [ "query" J..= Q.getQueryText q
             , "prepared_arguments" J..= fmap prepArgsJVal prepArgs
             ]
    where
      -- second component of a PrepArg appears to hold the raw bytes of the
      -- argument; decode them leniently to text for logging
      -- (NOTE(review): confirm PrepArg's shape against pg-client)
      prepArgsJVal (_, arg) = fmap (bsToTxt . fst) arg
-- | Intermediate representation of a computed SQL statement and prepared
-- arguments, or a raw bytestring (mostly, for introspection responses).
-- From this intermediate representation, a `LazyTx` can be generated, or the
-- SQL can be logged etc.
data ResolvedQuery
  = RRRaw !B.ByteString -- ^ a pre-computed response; no SQL involved
  | RRSql !PreparedSql  -- ^ SQL with its prepared arguments

-- | The computed SQL with alias which can be logged. Nothing here represents no
-- SQL for cases like introspection responses. Tuple of alias to a (maybe)
-- prepared statement
type GeneratedSqlMap = [(G.Alias, Maybe PreparedSql)]
-- | Build the response transaction: for each field, either return the raw
-- bytestring as-is or run the prepared SQL as a single-row JSON query, and
-- collect the per-field responses into one JSON object keyed by field alias.
mkLazyRespTx :: [(G.Alias, ResolvedQuery)] -> LazyRespTx
mkLazyRespTx resolved =
  fmap encJFromAssocList $ forM resolved $ \(alias, node) -> do
    resp <- case node of
      RRRaw bs                   -> return $ encJFromBS bs
      RRSql (PreparedSql q args) -> liftTx $ asSingleRowJsonResp q args
    return (G.unName $ G.unAlias alias, resp)
-- | Project the loggable part out of the resolved fields: keep the prepared
-- SQL for 'RRSql' nodes, and 'Nothing' for raw (non-SQL) responses.
mkGeneratedSqlMap :: [(G.Alias, ResolvedQuery)] -> GeneratedSqlMap
mkGeneratedSqlMap = map $ \(alias, node) ->
  case node of
    RRRaw _  -> (alias, Nothing)
    RRSql ps -> (alias, Just ps)

View File

@ -113,7 +113,7 @@ explainGQLQuery
-> Bool
-> GQLExplain
-> m EncJSON
explainGQLQuery pgExecCtx sc sqlGenCtx enableAL (GQLExplain query userVarsRaw)= do
explainGQLQuery pgExecCtx sc sqlGenCtx enableAL (GQLExplain query userVarsRaw) = do
execPlan <- E.getExecPlanPartial userInfo sc enableAL query
(gCtx, rootSelSet) <- case execPlan of
E.GExPHasura (gCtx, rootSelSet, _) ->
@ -129,6 +129,7 @@ explainGQLQuery pgExecCtx sc sqlGenCtx enableAL (GQLExplain query userVarsRaw)=
throw400 InvalidParams "only queries can be explained"
GV.RSubscription _ ->
throw400 InvalidParams "only queries can be explained"
where
usrVars = mkUserVars $ maybe [] Map.toList userVarsRaw
usrVars = mkUserVars $ maybe [] Map.toList userVarsRaw
userInfo = mkUserInfo (fromMaybe adminRole $ roleFromVars usrVars) usrVars

View File

@ -0,0 +1,59 @@
{-|
This module holds functions and data types used for logging at the GraphQL
layer. In contrast with, logging at the HTTP server layer.
-}
module Hasura.GraphQL.Logging
( logGraphqlQuery
, QueryLog(..)
) where
import qualified Data.Aeson as J
import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Transport.HTTP.Protocol (GQLReqUnparsed)
import Hasura.Prelude
import Hasura.Server.Utils (RequestId)
import qualified Hasura.GraphQL.Execute.Query as EQ
import qualified Hasura.Logging as L
-- | A GraphQL query, optionally the generated SQL, and the request id make
-- up the 'QueryLog'
data QueryLog
  = QueryLog
  { _qlQuery        :: !GQLReqUnparsed             -- ^ the raw GraphQL request
  , _qlGeneratedSql :: !(Maybe EQ.GeneratedSqlMap) -- ^ generated SQL; Nothing for e.g. mutations, subscriptions and remote queries
  , _qlRequestId    :: !RequestId                  -- ^ correlates this entry with the request's http/websocket log
  }

instance J.ToJSON QueryLog where
  toJSON (QueryLog q sql reqId) =
    J.object [ "query" J..= q
             , "generated_sql" J..= (encodeSql <$> sql)
             , "request_id" J..= reqId
             ]

-- | 'QueryLog's are emitted at info level under the query-log type.
instance L.ToEngineLog QueryLog where
  toEngineLog ql = (L.LevelInfo, L.ELTQueryLog, J.toJSON ql)
-- | Helper function to convert the list of alias to generated SQL into a
-- key-value map to be printed as JSON
encodeSql :: EQ.GeneratedSqlMap -> J.Value
encodeSql sql = J.object
  [ G.unName (G.unAlias alias) J..= fmap J.toJSON sqlM
  | (alias, sqlM) <- sql
  ]
{-|
Function to log a 'QueryLog'. This is meant to be used in execution of a
GraphQL query to log the GraphQL query and optionally the generated SQL.
-}
logGraphqlQuery
  :: (MonadIO m)
  => L.Logger
  -> QueryLog
  -> m ()
logGraphqlQuery logger ql = liftIO $ L.unLogger logger ql

View File

@ -22,7 +22,7 @@ import qualified Network.Wreq as Wreq
import Hasura.HTTP (wreqOptions)
import Hasura.RQL.DDL.Headers (getHeadersFromConf)
import Hasura.RQL.Types
import Hasura.Server.Utils (bsToTxt, httpExceptToJSON)
import Hasura.Server.Utils (httpExceptToJSON)
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Schema as GS

View File

@ -2,53 +2,58 @@ module Hasura.GraphQL.Transport.HTTP
( runGQ
) where
import qualified Data.ByteString.Lazy as BL
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as N
import Hasura.EncJSON
import Hasura.GraphQL.Logging
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.Server.Context
import Hasura.Server.Utils (RequestId)
import qualified Hasura.GraphQL.Execute as E
runGQ
:: (MonadIO m, MonadError QErr m)
=> PGExecCtx
:: ( MonadIO m
, MonadError QErr m
, MonadReader E.ExecutionCtx m
)
=> RequestId
-> UserInfo
-> SQLGenCtx
-> Bool
-> E.PlanCache
-> SchemaCache
-> SchemaCacheVer
-> HTTP.Manager
-> [N.Header]
-> GQLReqUnparsed
-> BL.ByteString -- this can be removed when we have a pretty-printer
-> m (HttpResponse EncJSON)
runGQ pgExecCtx userInfo sqlGenCtx enableAL planCache sc scVer
manager reqHdrs req rawReq = do
runGQ reqId userInfo reqHdrs req = do
E.ExecutionCtx _ sqlGenCtx pgExecCtx planCache sc scVer _ enableAL <- ask
execPlan <- E.getResolvedExecPlan pgExecCtx planCache
userInfo sqlGenCtx enableAL sc scVer req
case execPlan of
E.GExPHasura resolvedOp ->
flip HttpResponse Nothing <$> runHasuraGQ pgExecCtx userInfo resolvedOp
flip HttpResponse Nothing <$> runHasuraGQ reqId req userInfo resolvedOp
E.GExPRemote rsi opDef ->
E.execRemoteGQ manager userInfo reqHdrs rawReq rsi opDef
E.execRemoteGQ reqId userInfo reqHdrs req rsi opDef
runHasuraGQ
:: (MonadIO m, MonadError QErr m)
=> PGExecCtx
:: ( MonadIO m
, MonadError QErr m
, MonadReader E.ExecutionCtx m
)
=> RequestId
-> GQLReqUnparsed
-> UserInfo
-> E.ExecOp
-> m EncJSON
runHasuraGQ pgExecCtx userInfo resolvedOp = do
runHasuraGQ reqId query userInfo resolvedOp = do
E.ExecutionCtx logger _ pgExecCtx _ _ _ _ _ <- ask
respE <- liftIO $ runExceptT $ case resolvedOp of
E.ExOpQuery tx ->
E.ExOpQuery tx genSql -> do
-- log the generated SQL and the graphql query
liftIO $ logGraphqlQuery logger $ QueryLog query genSql reqId
runLazyTx' pgExecCtx tx
E.ExOpMutation tx ->
E.ExOpMutation tx -> do
-- log the graphql query
liftIO $ logGraphqlQuery logger $ QueryLog query Nothing reqId
runLazyTx pgExecCtx $ withUserInfo userInfo tx
E.ExOpSubs _ ->
throw400 UnexpectedPayload

View File

@ -29,20 +29,24 @@ import qualified StmContainers.Map as STMMap
import Control.Concurrent (threadDelay)
import Hasura.EncJSON
import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.GraphQL.Execute.LiveQuery as LQ
import Hasura.GraphQL.Logging
import Hasura.GraphQL.Transport.HTTP.Protocol
import Hasura.GraphQL.Transport.WebSocket.Protocol
import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
import qualified Hasura.Logging as L
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.RQL.Types.Error (Code (StartFailed))
import Hasura.Server.Auth (AuthMode, getUserInfoWithExpTime)
import Hasura.Server.Context
import Hasura.Server.Cors
import Hasura.Server.Utils (bsToTxt,
diffTimeToMicro)
import Hasura.Server.Utils (RequestId,
diffTimeToMicro,
getRequestId)
import qualified Hasura.GraphQL.Execute as E
import qualified Hasura.GraphQL.Execute.LiveQuery as LQ
import qualified Hasura.GraphQL.Transport.WebSocket.Server as WS
import qualified Hasura.Logging as L
type OperationMap
= STMMap.Map OperationId (LQ.LiveQueryId, Maybe OperationName)
@ -95,11 +99,21 @@ $(J.deriveToJSON
}
''OpDetail)
-- | Details of a single websocket GraphQL operation, carried by the
-- 'EOperation' event in the websocket log.
data OperationDetails
  = OperationDetails
  { _odOperationId   :: !OperationId            -- ^ the client-chosen operation id
  , _odRequestId     :: !(Maybe RequestId)      -- ^ server request id, when one was assigned
  , _odOperationName :: !(Maybe OperationName)  -- ^ operation name from the request, if any
  , _odOperationType :: !OpDetail               -- ^ lifecycle detail (started/completed/error/stopped)
  , _odQuery         :: !(Maybe GQLReqUnparsed) -- ^ the query itself; only populated on errors
  } deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 3 J.snakeCase) ''OperationDetails)
data WSEvent
= EAccepted
| ERejected !QErr
| EConnErr !ConnErrMsg
| EOperation !OperationId !(Maybe OperationName) !OpDetail
| EOperation !OperationDetails
| EClosed
deriving (Show, Eq)
$(J.deriveToJSON
@ -108,19 +122,38 @@ $(J.deriveToJSON
}
''WSEvent)
-- | Connection-level metadata attached to every websocket log line.
data WsConnInfo
  = WsConnInfo
  { _wsciWebsocketId :: !WS.WSId            -- ^ unique id of the websocket connection
  , _wsciJwtExpiry   :: !(Maybe TC.UTCTime) -- ^ JWT expiry, when the connection carries one
  , _wsciMsg         :: !(Maybe Text)       -- ^ optional free-form note (e.g. the CORS warning)
  } deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 5 J.snakeCase) ''WsConnInfo)

-- | The payload of a websocket log line: user vars, connection info and
-- the event being logged.
data WSLogInfo
  = WSLogInfo
  { _wsliUserVars       :: !(Maybe UserVars) -- ^ session variables, once the connection is initialised
  , _wsliConnectionInfo :: !WsConnInfo
  , _wsliEvent          :: !WSEvent
  } deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''WSLogInfo)
data WSLog
= WSLog
{ _wslWebsocketId :: !WS.WSId
, _wslUser :: !(Maybe UserVars)
, _wslJwtExpiry :: !(Maybe TC.UTCTime)
, _wslEvent :: !WSEvent
, _wslMsg :: !(Maybe Text)
} deriving (Show, Eq)
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''WSLog)
{ _wslLogLevel :: !L.LogLevel
, _wslInfo :: !WSLogInfo
}
instance L.ToEngineLog WSLog where
toEngineLog wsLog =
(L.LevelInfo, "ws-handler", J.toJSON wsLog)
toEngineLog (WSLog logLevel wsLog) =
(logLevel, L.ELTWebsocketLog, J.toJSON wsLog)
-- | Construct an info-level websocket log entry.
mkWsInfoLog :: Maybe UserVars -> WsConnInfo -> WSEvent -> WSLog
mkWsInfoLog uv ci = WSLog L.LevelInfo . WSLogInfo uv ci

-- | Construct an error-level websocket log entry.
mkWsErrorLog :: Maybe UserVars -> WsConnInfo -> WSEvent -> WSLog
mkWsErrorLog uv ci = WSLog L.LevelError . WSLogInfo uv ci
data WSServerEnv
= WSServerEnv
@ -162,7 +195,7 @@ onConn (L.Logger logger) corsPolicy wsId requestHead = do
threadDelay $ diffTimeToMicro $ TC.diffUTCTime expTime currTime
accept hdrs errType = do
logger $ WSLog wsId Nothing Nothing EAccepted Nothing
logger $ mkWsInfoLog Nothing (WsConnInfo wsId Nothing Nothing) EAccepted
connData <- WSConnData
<$> STM.newTVarIO (CSNotInitialised hdrs)
<*> STMMap.newIO
@ -173,7 +206,7 @@ onConn (L.Logger logger) corsPolicy wsId requestHead = do
(Just keepAliveAction) (Just jwtExpiryHandler)
reject qErr = do
logger $ WSLog wsId Nothing Nothing (ERejected qErr) Nothing
logger $ mkWsErrorLog Nothing (WsConnInfo wsId Nothing Nothing) (ERejected qErr)
return $ Left $ WS.RejectRequest
(H.statusCode $ qeStatus qErr)
(H.statusMessage $ qeStatus qErr) []
@ -195,7 +228,8 @@ onConn (L.Logger logger) corsPolicy wsId requestHead = do
if readCookie
then return reqHdrs
else do
liftIO $ logger $ WSLog wsId Nothing Nothing EAccepted (Just corsNote)
liftIO $ logger $
mkWsInfoLog Nothing (WsConnInfo wsId Nothing (Just corsNote)) EAccepted
return $ filter (\h -> fst h /= "Cookie") reqHdrs
CCAllowedOrigins ds
-- if the origin is in our cors domains, no error
@ -221,8 +255,8 @@ onConn (L.Logger logger) corsPolicy wsId requestHead = do
<> "HASURA_GRAPHQL_WS_READ_COOKIE to force read cookie when CORS is disabled."
onStart :: WSServerEnv -> WSConn -> StartMsg -> BL.ByteString -> IO ()
onStart serverEnv wsConn (StartMsg opId q) msgRaw = catchAndIgnore $ do
onStart :: WSServerEnv -> WSConn -> StartMsg -> IO ()
onStart serverEnv wsConn (StartMsg opId q) = catchAndIgnore $ do
opM <- liftIO $ STM.atomically $ STMMap.lookup opId opMap
@ -239,71 +273,79 @@ onStart serverEnv wsConn (StartMsg opId q) msgRaw = catchAndIgnore $ do
let e = "start received before the connection is initialised"
withComplete $ sendStartErr e
requestId <- getRequestId reqHdrs
(sc, scVer) <- liftIO $ IORef.readIORef gCtxMapRef
execPlanE <- runExceptT $ E.getResolvedExecPlan pgExecCtx
planCache userInfo sqlGenCtx enableAL sc scVer q
execPlan <- either (withComplete . preExecErr) return execPlanE
execPlan <- either (withComplete . preExecErr requestId) return execPlanE
let execCtx = E.ExecutionCtx logger sqlGenCtx pgExecCtx
planCache sc scVer httpMgr enableAL
case execPlan of
E.GExPHasura resolvedOp ->
runHasuraGQ userInfo resolvedOp
runHasuraGQ requestId q userInfo resolvedOp
E.GExPRemote rsi opDef ->
runRemoteGQ userInfo reqHdrs opDef rsi
runRemoteGQ execCtx requestId userInfo reqHdrs opDef rsi
where
runHasuraGQ :: UserInfo -> E.ExecOp -> ExceptT () IO ()
runHasuraGQ userInfo = \case
E.ExOpQuery opTx ->
execQueryOrMut $ runLazyTx' pgExecCtx opTx
runHasuraGQ :: RequestId -> GQLReqUnparsed -> UserInfo -> E.ExecOp
-> ExceptT () IO ()
runHasuraGQ reqId query userInfo = \case
E.ExOpQuery opTx genSql ->
execQueryOrMut reqId query genSql $ runLazyTx' pgExecCtx opTx
E.ExOpMutation opTx ->
execQueryOrMut $ runLazyTx pgExecCtx $
withUserInfo userInfo opTx
execQueryOrMut reqId query Nothing $
runLazyTx pgExecCtx $ withUserInfo userInfo opTx
E.ExOpSubs lqOp -> do
-- log the graphql query
liftIO $ logGraphqlQuery logger $ QueryLog query Nothing reqId
lqId <- liftIO $ LQ.addLiveQuery lqMap lqOp liveQOnChange
liftIO $ STM.atomically $
STMMap.insert (lqId, _grOperationName q) opId opMap
logOpEv ODStarted
logOpEv ODStarted (Just reqId)
execQueryOrMut action = do
logOpEv ODStarted
execQueryOrMut reqId query genSql action = do
logOpEv ODStarted (Just reqId)
-- log the generated SQL and the graphql query
liftIO $ logGraphqlQuery logger $ QueryLog query genSql reqId
resp <- liftIO $ runExceptT action
either postExecErr sendSuccResp resp
sendCompleted
either (postExecErr reqId) sendSuccResp resp
sendCompleted (Just reqId)
runRemoteGQ :: UserInfo -> [H.Header]
runRemoteGQ :: E.ExecutionCtx -> RequestId -> UserInfo -> [H.Header]
-> G.TypedOperationDefinition -> RemoteSchemaInfo
-> ExceptT () IO ()
runRemoteGQ userInfo reqHdrs opDef rsi = do
runRemoteGQ execCtx reqId userInfo reqHdrs opDef rsi = do
when (G._todType opDef == G.OperationTypeSubscription) $
withComplete $ preExecErr $
withComplete $ preExecErr reqId $
err400 NotSupported "subscription to remote server is not supported"
-- if it's not a subscription, use HTTP to execute the query on the remote
-- server
-- try to parse the (apollo protocol) websocket frame and get only the
-- payload
sockPayload <- onLeft (J.eitherDecode msgRaw) $
const $ withComplete $ preExecErr $
err500 Unexpected "invalid websocket payload"
let payload = J.encode $ _wpPayload sockPayload
resp <- runExceptT $ E.execRemoteGQ httpMgr userInfo reqHdrs
payload rsi opDef
either postExecErr (sendRemoteResp . _hrBody) resp
sendCompleted
resp <- runExceptT $ flip runReaderT execCtx $
E.execRemoteGQ reqId userInfo reqHdrs q rsi opDef
either (postExecErr reqId) (sendRemoteResp reqId . _hrBody) resp
sendCompleted (Just reqId)
sendRemoteResp resp =
sendRemoteResp reqId resp =
case J.eitherDecodeStrict (encJToBS resp) of
Left e -> postExecErr $ invalidGqlErr $ T.pack e
Left e -> postExecErr reqId $ invalidGqlErr $ T.pack e
Right res -> sendMsg wsConn $ SMData $ DataMsg opId (GRRemote res)
invalidGqlErr err = err500 Unexpected $
"Failed parsing GraphQL response from remote: " <> err
WSServerEnv logger pgExecCtx lqMap gCtxMapRef httpMgr _
sqlGenCtx planCache _ enableAL = serverEnv
WSServerEnv logger pgExecCtx lqMap gCtxMapRef httpMgr _ sqlGenCtx planCache
_ enableAL = serverEnv
WSConnData userInfoR opMap errRespTy = WS.getData wsConn
logOpEv opDet =
logWSEvent logger wsConn $ EOperation opId (_grOperationName q) opDet
logOpEv opTy reqId =
logWSEvent logger wsConn $ EOperation opDet
where
opDet = OperationDetails opId reqId (_grOperationName q) opTy query
-- log the query only in errors
query = case opTy of
ODQueryErr _ -> Just q
_ -> Nothing
getErrFn errTy =
case errTy of
@ -314,22 +356,22 @@ onStart serverEnv wsConn (StartMsg opId q) msgRaw = catchAndIgnore $ do
let errFn = getErrFn errRespTy
sendMsg wsConn $ SMErr $ ErrorMsg opId $ errFn False $
err400 StartFailed e
logOpEv $ ODProtoErr e
logOpEv (ODProtoErr e) Nothing
sendCompleted = do
sendCompleted reqId = do
sendMsg wsConn $ SMComplete $ CompletionMsg opId
logOpEv ODCompleted
logOpEv ODCompleted reqId
postExecErr qErr = do
postExecErr reqId qErr = do
let errFn = getErrFn errRespTy
logOpEv $ ODQueryErr qErr
logOpEv (ODQueryErr qErr) (Just reqId)
sendMsg wsConn $ SMData $ DataMsg opId $
GRHasura $ GQExecError $ pure $ errFn False qErr
-- why wouldn't pre exec error use graphql response?
preExecErr qErr = do
preExecErr reqId qErr = do
let errFn = getErrFn errRespTy
logOpEv $ ODQueryErr qErr
logOpEv (ODQueryErr qErr) (Just reqId)
let err = case errRespTy of
ERTLegacy -> errFn False qErr
ERTGraphqlCompliant -> J.object ["errors" J..= [errFn False qErr]]
@ -342,7 +384,7 @@ onStart serverEnv wsConn (StartMsg opId q) msgRaw = catchAndIgnore $ do
withComplete :: ExceptT () IO () -> ExceptT () IO a
withComplete action = do
action
sendCompleted
sendCompleted Nothing
throwError ()
-- on change, send message on the websocket
@ -368,7 +410,7 @@ onMessage authMode serverEnv wsConn msgRaw =
CMConnInit params -> onConnInit (_wseLogger serverEnv)
(_wseHManager serverEnv)
wsConn authMode params
CMStart startMsg -> onStart serverEnv wsConn startMsg msgRaw
CMStart startMsg -> onStart serverEnv wsConn startMsg
CMStop stopMsg -> onStop serverEnv wsConn stopMsg
CMConnTerm -> WS.closeConn wsConn "GQL_CONNECTION_TERMINATE received"
where
@ -380,7 +422,7 @@ onStop serverEnv wsConn (StopMsg opId) = do
opM <- liftIO $ STM.atomically $ STMMap.lookup opId opMap
case opM of
Just (lqId, opNameM) -> do
logWSEvent logger wsConn $ EOperation opId opNameM ODStopped
logWSEvent logger wsConn $ EOperation $ opDet opNameM
LQ.removeLiveQuery lqMap lqId
Nothing -> return ()
STM.atomically $ STMMap.delete opId opMap
@ -388,6 +430,7 @@ onStop serverEnv wsConn (StopMsg opId) = do
logger = _wseLogger serverEnv
lqMap = _wseLiveQMap serverEnv
opMap = _wscOpMap $ WS.getData wsConn
opDet n = OperationDetails opId Nothing n ODStopped Nothing
logWSEvent
:: (MonadIO m)
@ -399,10 +442,22 @@ logWSEvent (L.Logger logger) wsConn wsEv = do
, jwtM
)
_ -> (Nothing, Nothing)
liftIO $ logger $ WSLog wsId userVarsM jwtExpM wsEv Nothing
liftIO $ logger $ WSLog logLevel $ WSLogInfo userVarsM (WsConnInfo wsId jwtExpM Nothing) wsEv
where
WSConnData userInfoR _ _ = WS.getData wsConn
wsId = WS.getWSId wsConn
logLevel = bool L.LevelInfo L.LevelError isError
isError = case wsEv of
EAccepted -> False
ERejected _ -> True
EConnErr _ -> True
EClosed -> False
EOperation op -> case _odOperationType op of
ODStarted -> False
ODProtoErr _ -> True
ODQueryErr _ -> True
ODCompleted -> False
ODStopped -> False
onConnInit
:: (MonadIO m)
@ -464,9 +519,9 @@ createWSServerEnv
createWSServerEnv logger pgExecCtx lqState cacheRef httpManager
corsPolicy sqlGenCtx enableAL planCache = do
wsServer <- STM.atomically $ WS.createWSServer logger
return $ WSServerEnv logger
pgExecCtx lqState cacheRef
httpManager corsPolicy sqlGenCtx planCache wsServer enableAL
return $
WSServerEnv logger pgExecCtx lqState cacheRef httpManager corsPolicy
sqlGenCtx planCache wsServer enableAL
createWSServerApp :: AuthMode -> WSServerEnv -> WS.ServerApp
createWSServerApp authMode serverEnv =
@ -477,18 +532,3 @@ createWSServerApp authMode serverEnv =
(onConn (_wseLogger serverEnv) (_wseCorsPolicy serverEnv))
(onMessage authMode serverEnv)
(onClose (_wseLogger serverEnv) $ _wseLiveQMap serverEnv)
-- | TODO:
-- | The following ADT is required so that we can parse the incoming websocket
-- | frame, and only pick the payload, for remote schema queries.
-- | Ideally we should use `StartMsg` from Websocket.Protocol, but as
-- | `GraphQLRequest` doesn't have a ToJSON instance we are using our own type to
-- | get only the payload
data WebsocketPayload
= WebsocketPayload
{ _wpId :: !Text
, _wpType :: !Text
, _wpPayload :: !J.Value
} deriving (Show, Eq)
$(J.deriveJSON (J.aesonDrop 3 J.snakeCase) ''WebsocketPayload)

View File

@ -72,7 +72,7 @@ $(J.deriveToJSON (J.aesonDrop 4 J.snakeCase) ''WSLog)
instance L.ToEngineLog WSLog where
toEngineLog wsLog =
(L.LevelDebug, "ws-server", J.toJSON wsLog)
(L.LevelDebug, L.ELTWsServer, J.toJSON wsLog)
data WSConn a
= WSConn

View File

@ -14,7 +14,6 @@ import qualified Network.HTTP.Types as HTTP
import qualified Network.Wreq as Wreq
import Data.CaseInsensitive (original)
import Hasura.Server.Utils (bsToTxt)
import Hasura.Server.Version (currentVersion)
hdrsToText :: [HTTP.Header] -> [(Text, Text)]

View File

@ -4,7 +4,10 @@ module Hasura.Logging
( LoggerSettings(..)
, defaultLoggerSettings
, EngineLog(..)
, EngineLogType
, EngineLogType(..)
, defaultEnabledLogTypes
, alwaysOnLogTypes
, userAllowedLogTypes
, ToEngineLog(..)
, debugT
, debugBS
@ -25,6 +28,7 @@ import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString as B
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashSet as Set
import qualified Data.TByteString as TBS
import qualified Data.Text as T
import qualified Data.Time.Clock as Time
@ -32,13 +36,64 @@ import qualified Data.Time.Format as Format
import qualified Data.Time.LocalTime as Time
import qualified System.Log.FastLogger as FL
import Hasura.Server.Utils (hyphenate)
newtype FormattedTime
= FormattedTime { _unFormattedTime :: Text }
deriving (Show, Eq, J.ToJSON)
newtype EngineLogType
= EngineLogType { _unEngineLogType :: Text }
deriving (Show, Eq, J.ToJSON, J.FromJSON, IsString)
-- | All log types the engine can emit. The first group is user-facing and
-- can be enabled/disabled (see 'userAllowedLogTypes'); the internal types
-- are always enabled (see 'alwaysOnLogTypes').
data EngineLogType
  = ELTHttpLog
  | ELTWebsocketLog
  | ELTWebhookLog
  | ELTQueryLog
  | ELTStartup
  -- internal log types
  | ELTPgClient
  | ELTMetadata
  | ELTJwkRefreshLog
  | ELTTelemetryLog
  | ELTEventTrigger
  | ELTWsServer
  | ELTSchemaSyncThread
  | ELTUnstructured
  deriving (Show, Eq, Generic)

instance Hashable EngineLogType

-- JSON tags are the hyphenated constructor names with the @ELT@ prefix
-- dropped, e.g. @http-log@, @query-log@.
$(J.deriveJSON J.defaultOptions {
    J.constructorTagModifier = hyphenate . drop 3
  } ''EngineLogType)
-- | Log types that can't be disabled/enabled by the user, they are always
-- enabled. These are included in 'defaultEnabledLogTypes' and are expected
-- to be part of any enabled set handed to the logger.
alwaysOnLogTypes :: Set.HashSet EngineLogType
alwaysOnLogTypes = Set.fromList
  [ ELTPgClient
  , ELTMetadata
  , ELTJwkRefreshLog
  , ELTTelemetryLog
  , ELTEventTrigger
  , ELTWsServer
  , ELTSchemaSyncThread
  , ELTUnstructured
  ]
-- | The log types enabled when the user configures none: the always-on
-- internal types plus startup, http, webhook and websocket logs.
defaultEnabledLogTypes :: Set.HashSet EngineLogType
defaultEnabledLogTypes =
  foldr Set.insert alwaysOnLogTypes
    [ELTStartup, ELTHttpLog, ELTWebhookLog, ELTWebsocketLog]
-- | Log types that can be enabled/disabled by the user (e.g. via the
-- enabled-log-types configuration); everything else is always on.
userAllowedLogTypes :: [EngineLogType]
userAllowedLogTypes =
  [ ELTStartup
  , ELTHttpLog
  , ELTWebhookLog
  , ELTWebsocketLog
  , ELTQueryLog
  ]
data LogLevel
= LevelDebug
@ -81,16 +136,17 @@ debugLBS = UnstructuredLog . TBS.fromLBS
instance ToEngineLog UnstructuredLog where
toEngineLog (UnstructuredLog t) =
(LevelDebug, "unstructured", J.toJSON t)
(LevelDebug, ELTUnstructured, J.toJSON t)
class ToEngineLog a where
toEngineLog :: a -> (LogLevel, EngineLogType, J.Value)
data LoggerCtx
= LoggerCtx
{ _lcLoggerSet :: !FL.LoggerSet
, _lcLogLevel :: !LogLevel
, _lcTimeGetter :: !(IO FormattedTime)
{ _lcLoggerSet :: !FL.LoggerSet
, _lcLogLevel :: !LogLevel
, _lcTimeGetter :: !(IO FormattedTime)
, _lcEnabledLogTypes :: !(Set.HashSet EngineLogType)
}
data LoggerSettings
@ -101,9 +157,9 @@ data LoggerSettings
, _lsLevel :: !LogLevel
} deriving (Show, Eq)
defaultLoggerSettings :: Bool -> LoggerSettings
defaultLoggerSettings :: Bool -> LogLevel -> LoggerSettings
defaultLoggerSettings isCached =
LoggerSettings isCached Nothing LevelInfo
LoggerSettings isCached Nothing
getFormattedTime :: Maybe Time.TimeZone -> IO FormattedTime
getFormattedTime tzM = do
@ -116,11 +172,11 @@ getFormattedTime tzM = do
format = "%FT%H:%M:%S%3Q%z"
-- format = Format.iso8601DateFormat (Just "%H:%M:%S")
mkLoggerCtx :: LoggerSettings -> IO LoggerCtx
mkLoggerCtx (LoggerSettings cacheTime tzM logLevel) = do
mkLoggerCtx :: LoggerSettings -> Set.HashSet EngineLogType -> IO LoggerCtx
mkLoggerCtx (LoggerSettings cacheTime tzM logLevel) enabledLogs = do
loggerSet <- FL.newStdoutLoggerSet FL.defaultBufSize
timeGetter <- bool (return $ getFormattedTime tzM) cachedTimeGetter cacheTime
return $ LoggerCtx loggerSet logLevel timeGetter
return $ LoggerCtx loggerSet logLevel timeGetter enabledLogs
where
cachedTimeGetter =
Auto.mkAutoUpdate Auto.defaultUpdateSettings {
@ -134,9 +190,9 @@ cleanLoggerCtx =
newtype Logger = Logger { unLogger :: forall a. (ToEngineLog a) => a -> IO () }
mkLogger :: LoggerCtx -> Logger
mkLogger (LoggerCtx loggerSet serverLogLevel timeGetter) = Logger $ \l -> do
mkLogger (LoggerCtx loggerSet serverLogLevel timeGetter enabledLogTypes) = Logger $ \l -> do
localTime <- timeGetter
let (logLevel, logTy, logDet) = toEngineLog l
when (logLevel >= serverLogLevel) $
when (logLevel >= serverLogLevel && logTy `Set.member` enabledLogTypes) $
FL.pushLogStrLn loggerSet $ FL.toLogStr $
J.encode $ EngineLog localTime logLevel logTy logDet

View File

@ -3,6 +3,8 @@ module Hasura.Prelude
, onNothing
, onJust
, onLeft
, bsToTxt
, txtToBs
) where
import Control.Applicative as M ((<|>))
@ -34,6 +36,10 @@ import GHC.Generics as M (Generic)
import Prelude as M hiding (fail, init, lookup)
import Text.Read as M (readEither, readMaybe)
import qualified Data.ByteString as B
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
onNothing :: (Monad m) => Maybe a -> m a -> m a
onNothing m act = maybe act return m
@ -43,5 +49,8 @@ onJust m action = maybe (return ()) action m
onLeft :: (Monad m) => Either e a -> (e -> m a) -> m a
onLeft e f = either f return e
bsToTxt :: B.ByteString -> Text
bsToTxt = TE.decodeUtf8With TE.lenientDecode
txtToBs :: Text -> B.ByteString
txtToBs = TE.encodeUtf8

View File

@ -42,6 +42,7 @@ import qualified Hasura.GraphQL.Transport.HTTP as GH
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.GraphQL.Transport.WebSocket as WS
import qualified Hasura.Logging as L
import qualified Hasura.Server.PGDump as PGD
import Hasura.EncJSON
import Hasura.Prelude hiding (get, put)
@ -56,7 +57,6 @@ import Hasura.Server.Cors
import Hasura.Server.Init
import Hasura.Server.Logging
import Hasura.Server.Middleware (corsMiddleware)
import qualified Hasura.Server.PGDump as PGD
import Hasura.Server.Query
import Hasura.Server.Utils
import Hasura.Server.Version
@ -128,24 +128,34 @@ data ServerCtx
data HandlerCtx
= HandlerCtx
{ hcServerCtx :: !ServerCtx
, hcReqBody :: !BL.ByteString
, hcUser :: !UserInfo
, hcReqHeaders :: ![N.Header]
, hcRequestId :: !RequestId
}
type Handler = ExceptT QErr (ReaderT HandlerCtx IO)
data APIResp
= JSONResp !(HttpResponse EncJSON)
| RawResp !(HttpResponse BL.ByteString) -- headers, body
| RawResp !(HttpResponse BL.ByteString)
apiRespToLBS :: APIResp -> BL.ByteString
apiRespToLBS = \case
JSONResp (HttpResponse j _) -> encJToLBS j
RawResp (HttpResponse b _) -> b
mkAPIRespHandler :: Handler (HttpResponse EncJSON) -> Handler APIResp
mkAPIRespHandler = fmap JSONResp
data APIHandler a
= AHGet !(Handler APIResp)
| AHPost !(a -> Handler APIResp)
mkGetHandler :: Handler APIResp -> APIHandler ()
mkGetHandler = AHGet
mkPostHandler :: (a -> Handler APIResp) -> APIHandler a
mkPostHandler = AHPost
mkAPIRespHandler :: (a -> Handler (HttpResponse EncJSON)) -> (a -> Handler APIResp)
mkAPIRespHandler = (fmap . fmap) JSONResp
isMetadataEnabled :: ServerCtx -> Bool
isMetadataEnabled sc = S.member METADATA $ scEnabledAPIs sc
@ -163,12 +173,11 @@ isDeveloperAPIEnabled :: ServerCtx -> Bool
isDeveloperAPIEnabled sc = S.member DEVELOPER $ scEnabledAPIs sc
-- {-# SCC parseBody #-}
parseBody :: (FromJSON a) => Handler a
parseBody = do
reqBody <- hcReqBody <$> ask
case decode' reqBody of
Just jVal -> decodeValue jVal
Nothing -> throw400 InvalidJSON "invalid json"
parseBody :: (FromJSON a, MonadError QErr m) => BL.ByteString -> m a
parseBody reqBody =
case eitherDecode' reqBody of
Left e -> throw400 InvalidJSON (T.pack e)
Right jVal -> decodeValue jVal
onlyAdmin :: Handler ()
onlyAdmin = do
@ -186,72 +195,103 @@ buildQCtx = do
logResult
:: (MonadIO m)
=> Maybe UserInfo -> Wai.Request -> BL.ByteString -> L.Logger
-> Either QErr BL.ByteString -> Maybe (UTCTime, UTCTime)
=> L.Logger
-> Maybe UserInfo
-> RequestId
-> Wai.Request
-> Maybe Value
-> Either QErr BL.ByteString
-> Maybe (UTCTime, UTCTime)
-> m ()
logResult userInfoM req reqBody logger res qTime =
liftIO $ L.unLogger logger $ mkAccessLog userInfoM req (reqBody, res) qTime
logResult logger userInfoM reqId httpReq req res qTime = do
let logline = case res of
Right res' -> mkHttpAccessLog userInfoM reqId httpReq res' qTime
Left e -> mkHttpErrorLog userInfoM reqId httpReq e req qTime
liftIO $ L.unLogger logger $ logline
-- logResult userInfoM req reqBody logger res qTime =
-- liftIO $ L.unLogger logger $ mkAccessLog userInfoM req (reqBody, res) qTime
logError
:: MonadIO m
=> Maybe UserInfo -> Wai.Request
-> BL.ByteString -> L.Logger -> QErr -> m ()
logError userInfoM req reqBody logger qErr =
logResult userInfoM req reqBody logger (Left qErr) Nothing
:: (MonadIO m)
=> L.Logger
-> Maybe UserInfo
-> RequestId
-> Wai.Request
-> Maybe Value
-> QErr -> m ()
logError logger userInfoM reqId httpReq req qErr =
liftIO $ L.unLogger logger $ mkHttpErrorLog userInfoM reqId httpReq qErr req Nothing
mkSpockAction
:: (MonadIO m)
:: (MonadIO m, FromJSON a, ToJSON a)
=> (Bool -> QErr -> Value)
-> (QErr -> QErr)
-> ServerCtx
-> Handler APIResp
-> APIHandler a
-> ActionT m ()
mkSpockAction qErrEncoder qErrModifier serverCtx handler = do
mkSpockAction qErrEncoder qErrModifier serverCtx apiHandler = do
req <- request
reqBody <- liftIO $ strictRequestBody req
let headers = requestHeaders req
let headers = requestHeaders req
authMode = scAuthMode serverCtx
manager = scManager serverCtx
requestId <- getRequestId headers
userInfoE <- liftIO $ runExceptT $ getUserInfo logger manager headers authMode
userInfo <- either (logAndThrow req reqBody False . qErrModifier) return userInfoE
userInfo <- either (logAndThrow requestId req reqBody False . qErrModifier)
return userInfoE
let handlerState = HandlerCtx serverCtx reqBody userInfo headers
let handlerState = HandlerCtx serverCtx userInfo headers requestId
curRole = userRole userInfo
t1 <- liftIO getCurrentTime -- for measuring response time purposes
result <- liftIO $ runReaderT (runExceptT handler) handlerState
(result, q) <- case apiHandler of
AHGet handler -> do
res <- liftIO $ runReaderT (runExceptT handler) handlerState
return (res, Nothing)
AHPost handler -> do
parsedReqE <- runExceptT $ parseBody reqBody
parsedReq <- either (qErrToResp (isAdmin curRole) . qErrModifier) return parsedReqE
res <- liftIO $ runReaderT (runExceptT $ handler parsedReq) handlerState
return (res, Just parsedReq)
t2 <- liftIO getCurrentTime -- for measuring response time purposes
-- apply the error modifier
let modResult = fmapL qErrModifier result
-- log result
logResult (Just userInfo) req reqBody logger (apiRespToLBS <$> modResult) $
Just (t1, t2)
either (qErrToResp $ userRole userInfo == adminRole) resToResp modResult
logResult logger (Just userInfo) requestId req (toJSON <$> q)
(apiRespToLBS <$> modResult) $ Just (t1, t2)
either (qErrToResp (isAdmin curRole)) (resToResp requestId) modResult
where
logger = scLogger serverCtx
logger = scLogger serverCtx
logAndThrow reqId req reqBody includeInternal qErr = do
let reqTxt = Just $ toJSON $ String $ bsToTxt $ BL.toStrict reqBody
logError logger Nothing reqId req reqTxt qErr
qErrToResp includeInternal qErr
-- encode error response
qErrToResp :: (MonadIO m) => Bool -> QErr -> ActionCtxT ctx m b
qErrToResp includeInternal qErr = do
setStatus $ qeStatus qErr
json $ qErrEncoder includeInternal qErr
logAndThrow req reqBody includeInternal qErr = do
logError Nothing req reqBody logger qErr
qErrToResp includeInternal qErr
resToResp = \case
resToResp reqId = \case
JSONResp (HttpResponse j h) -> do
uncurry setHeader jsonHeader
uncurry setHeader (requestIdHeader, unRequestId reqId)
mapM_ (mapM_ (uncurry setHeader . unHeader)) h
lazyBytes $ encJToLBS j
RawResp (HttpResponse b h) -> do
uncurry setHeader (requestIdHeader, unRequestId reqId)
mapM_ (mapM_ (uncurry setHeader . unHeader)) h
lazyBytes b
v1QueryHandler :: RQLQuery -> Handler (HttpResponse EncJSON)
v1QueryHandler query = do
scRef <- scCacheRef . hcServerCtx <$> ask
@ -274,7 +314,6 @@ v1QueryHandler query = do
v1Alpha1GQHandler :: GH.GQLReqUnparsed -> Handler (HttpResponse EncJSON)
v1Alpha1GQHandler query = do
userInfo <- asks hcUser
reqBody <- asks hcReqBody
reqHeaders <- asks hcReqHeaders
manager <- scManager . hcServerCtx <$> ask
scRef <- scCacheRef . hcServerCtx <$> ask
@ -282,11 +321,16 @@ v1Alpha1GQHandler query = do
pgExecCtx <- scPGExecCtx . hcServerCtx <$> ask
sqlGenCtx <- scSQLGenCtx . hcServerCtx <$> ask
planCache <- scPlanCache . hcServerCtx <$> ask
enableAL <- scEnableAllowlist . hcServerCtx <$> ask
GH.runGQ pgExecCtx userInfo sqlGenCtx enableAL planCache
sc scVer manager reqHeaders query reqBody
enableAL <- scEnableAllowlist . hcServerCtx <$> ask
logger <- scLogger . hcServerCtx <$> ask
requestId <- asks hcRequestId
let execCtx = E.ExecutionCtx logger sqlGenCtx pgExecCtx planCache
sc scVer manager enableAL
flip runReaderT execCtx $ GH.runGQ requestId userInfo reqHeaders query
v1GQHandler :: GH.GQLReqUnparsed -> Handler (HttpResponse EncJSON)
v1GQHandler
:: GH.GQLReqUnparsed
-> Handler (HttpResponse EncJSON)
v1GQHandler = v1Alpha1GQHandler
gqlExplainHandler :: GE.GQLExplain -> Handler (HttpResponse EncJSON)
@ -346,8 +390,10 @@ mkConsoleHTML path authMode enableTelemetry consoleAssetsDir =
r -> "/console/" <> r
errMsg = "console template rendering failed: " ++ show errs
newtype QueryParser
= QueryParser { getQueryParser :: QualifiedTable -> Handler RQLQuery }
= QueryParser
{ getQueryParser :: QualifiedTable -> Object -> Handler RQLQuery }
queryParsers :: M.HashMap T.Text QueryParser
queryParsers =
@ -360,16 +406,16 @@ queryParsers =
]
where
mkQueryParser f =
QueryParser $ \qt -> do
obj <- parseBody
QueryParser $ \qt obj -> do
let val = Object $ M.insert "table" (toJSON qt) obj
q <- decodeValue val
return $ f q
legacyQueryHandler :: TableName -> T.Text -> Handler (HttpResponse EncJSON)
legacyQueryHandler tn queryType =
legacyQueryHandler :: TableName -> T.Text -> Object
-> Handler (HttpResponse EncJSON)
legacyQueryHandler tn queryType req =
case M.lookup queryType queryParsers of
Just queryParser -> getQueryParser queryParser qt >>= v1QueryHandler
Just queryParser -> getQueryParser queryParser qt req >>= v1QueryHandler
Nothing -> throw404 "No such resource exists"
where
qt = QualifiedObject publicSchema tn
@ -399,8 +445,8 @@ mkWaiApp
-> EL.LQOpts
-> IO (Wai.Application, SchemaCacheRef, Maybe UTCTime)
mkWaiApp isoLevel loggerCtx sqlGenCtx enableAL pool ci httpManager mode corsCfg
enableConsole consoleAssetsDir enableTelemetry instanceId apis
lqOpts = do
enableConsole consoleAssetsDir enableTelemetry instanceId apis lqOpts = do
let pgExecCtx = PGExecCtx pool isoLevel
pgExecCtxSer = PGExecCtx pool Q.Serializable
(cacheRef, cacheBuiltTime) <- do
@ -474,63 +520,56 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
put ("v1/template" <//> var) tmpltPutOrPostH
delete ("v1/template" <//> var) tmpltGetOrDeleteH
post "v1/query" $ mkSpockAction encodeQErr id serverCtx $ mkAPIRespHandler $ do
query <- parseBody
v1QueryHandler query
post "v1/query" $ mkSpockAction encodeQErr id serverCtx $
mkPostHandler $ mkAPIRespHandler v1QueryHandler
post ("api/1/table" <//> var <//> var) $ \tableName queryType ->
mkSpockAction encodeQErr id serverCtx $ mkAPIRespHandler $
legacyQueryHandler (TableName tableName) queryType
mkSpockAction encodeQErr id serverCtx $ mkPostHandler $
mkAPIRespHandler $ legacyQueryHandler (TableName tableName) queryType
when enablePGDump $
post "v1alpha1/pg_dump" $ mkSpockAction encodeQErr id serverCtx $ do
query <- parseBody
v1Alpha1PGDumpHandler query
post "v1alpha1/pg_dump" $ mkSpockAction encodeQErr id serverCtx $
mkPostHandler v1Alpha1PGDumpHandler
when enableConfig $
get "v1alpha1/config" $ mkSpockAction encodeQErr id serverCtx $
mkAPIRespHandler $ do
mkGetHandler $ do
onlyAdmin
return $ HttpResponse
(encJFromJValue $ runGetConfig (scAuthMode serverCtx))
Nothing
let res = encJFromJValue $ runGetConfig (scAuthMode serverCtx)
return $ JSONResp $ HttpResponse res Nothing
when enableGraphQL $ do
post "v1alpha1/graphql/explain" gqlExplainAction
post "v1alpha1/graphql" $ mkSpockAction GH.encodeGQErr id serverCtx $
mkAPIRespHandler $ do
query <- parseBody
v1Alpha1GQHandler query
mkPostHandler $ mkAPIRespHandler v1Alpha1GQHandler
post "v1/graphql/explain" gqlExplainAction
post "v1/graphql" $ mkSpockAction GH.encodeGQErr allMod200 serverCtx $
mkAPIRespHandler $ do
query <- parseBody
v1GQHandler query
mkPostHandler $ mkAPIRespHandler v1GQHandler
when (isDeveloperAPIEnabled serverCtx) $ do
get "dev/ekg" $ mkSpockAction encodeQErr id serverCtx $
mkAPIRespHandler $ do
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ EKG.sampleAll $ scEkgStore serverCtx
return $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue $ EKG.sampleToJson respJ) Nothing
get "dev/plan_cache" $ mkSpockAction encodeQErr id serverCtx $
mkAPIRespHandler $ do
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ E.dumpPlanCache $ scPlanCache serverCtx
return $ HttpResponse (encJFromJValue respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
get "dev/subscriptions" $ mkSpockAction encodeQErr id serverCtx $
mkAPIRespHandler $ do
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ EL.dumpLiveQueriesState False $ scLQState serverCtx
return $ HttpResponse (encJFromJValue respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
get "dev/subscriptions/extended" $ mkSpockAction encodeQErr id serverCtx $
mkAPIRespHandler $ do
mkGetHandler $ do
onlyAdmin
respJ <- liftIO $ EL.dumpLiveQueriesState True $ scLQState serverCtx
return $ HttpResponse (encJFromJValue respJ) Nothing
return $ JSONResp $ HttpResponse (encJFromJValue respJ) Nothing
forM_ [GET,POST] $ \m -> hookAny m $ \_ -> do
let qErr = err404 NotFound "resource does not exist"
@ -538,28 +577,29 @@ httpApp corsCfg serverCtx enableConsole consoleAssetsDir enableTelemetry = do
where
logger = scLogger serverCtx
-- all graphql errors should be of type 200
allMod200 qe = qe { qeStatus = N.status200 }
gqlExplainAction =
mkSpockAction encodeQErr id serverCtx $ mkAPIRespHandler $ do
expQuery <- parseBody
gqlExplainHandler expQuery
mkSpockAction encodeQErr id serverCtx $ mkPostHandler $
mkAPIRespHandler gqlExplainHandler
enableGraphQL = isGraphQLEnabled serverCtx
enableMetadata = isMetadataEnabled serverCtx
enablePGDump = isPGDumpEnabled serverCtx
enableConfig = isConfigEnabled serverCtx
tmpltGetOrDeleteH tmpltName = do
tmpltArgs <- tmpltArgsFromQueryParams
mkSpockAction encodeQErr id serverCtx $ mkAPIRespHandler $
mkQTemplateAction tmpltName tmpltArgs
mkSpockAction encodeQErr id serverCtx $ mkGetHandler $
JSONResp <$> mkQTemplateAction tmpltName tmpltArgs
tmpltPutOrPostH tmpltName = do
tmpltArgs <- tmpltArgsFromQueryParams
mkSpockAction encodeQErr id serverCtx $ mkAPIRespHandler $ do
bodyTmpltArgs <- parseBody
mkQTemplateAction tmpltName $ M.union bodyTmpltArgs tmpltArgs
mkSpockAction encodeQErr id serverCtx $ mkPostHandler $
mkAPIRespHandler $ \bodyTmpltArgs ->
mkQTemplateAction tmpltName $ M.union bodyTmpltArgs tmpltArgs
tmpltArgsFromQueryParams = do
qparams <- params
@ -588,7 +628,9 @@ raiseGenericApiError :: L.Logger -> QErr -> ActionT IO ()
raiseGenericApiError logger qErr = do
req <- request
reqBody <- liftIO $ strictRequestBody req
logError Nothing req reqBody logger qErr
let reqTxt = toJSON $ String $ bsToTxt $ BL.toStrict reqBody
reqId <- getRequestId $ requestHeaders req
logError logger Nothing reqId req (Just reqTxt) qErr
uncurry setHeader jsonHeader
setStatus $ qeStatus qErr
lazyBytes $ encode qErr

View File

@ -17,20 +17,19 @@ module Hasura.Server.Auth
, jwkRefreshCtrl
) where
import Control.Exception (try)
import Control.Exception (try)
import Control.Lens
import Data.Aeson
import Data.IORef (newIORef)
import Data.Time.Clock (UTCTime)
import Data.IORef (newIORef)
import Data.Time.Clock (UTCTime)
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.String.Conversions as CS
import qualified Data.Text as T
import qualified Network.HTTP.Client as H
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import qualified Data.Aeson as J
import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Network.HTTP.Client as H
import qualified Network.HTTP.Types as N
import qualified Network.Wreq as Wreq
import Hasura.HTTP
import Hasura.Logging
@ -40,7 +39,7 @@ import Hasura.Server.Auth.JWT
import Hasura.Server.Logging
import Hasura.Server.Utils
import qualified Hasura.Logging as L
import qualified Hasura.Logging as L
newtype AdminSecret
@ -77,7 +76,7 @@ mkAuthMode
)
=> Maybe AdminSecret
-> Maybe AuthHook
-> Maybe T.Text
-> Maybe JWTConfig
-> Maybe RoleName
-> H.Manager
-> LoggerCtx
@ -113,13 +112,11 @@ mkJwtCtx
:: ( MonadIO m
, MonadError T.Text m
)
=> T.Text
=> JWTConfig
-> H.Manager
-> LoggerCtx
-> m JWTCtx
mkJwtCtx jwtConf httpManager loggerCtx = do
-- the JWT Conf as JSON string; try to parse it
conf <- either decodeErr return $ eitherDecodeStrict $ CS.cs jwtConf
mkJwtCtx conf httpManager loggerCtx = do
jwkRef <- case jcKeyOrUrl conf of
Left jwk -> liftIO $ newIORef (JWKSet [jwk])
Right url -> do
@ -133,8 +130,6 @@ mkJwtCtx jwtConf httpManager loggerCtx = do
return ref
let claimsFmt = fromMaybe JCFJson (jcClaimsFormat conf)
return $ JWTCtx jwkRef (jcClaimNs conf) (jcAudience conf) claimsFmt
where
decodeErr e = throwError . T.pack $ "Fatal Error: JWT conf: " <> e
mkUserInfoFromResp
:: (MonadIO m, MonadError QErr m)

View File

@ -24,12 +24,12 @@ import Data.Time.Format (defaultTimeLocale, parseTimeM)
import Network.URI (URI)
import Hasura.HTTP
import Hasura.Logging (Logger (..))
import Hasura.Logging (LogLevel (..), Logger (..))
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.Server.Auth.JWT.Internal (parseHmacKey, parseRsaKey)
import Hasura.Server.Auth.JWT.Logging
import Hasura.Server.Utils (bsToTxt, diffTimeToMicro,
import Hasura.Server.Utils (diffTimeToMicro,
userRoleHeader)
import qualified Control.Concurrent as C
@ -128,19 +128,23 @@ updateJwkRef
-> m (Maybe NominalDiffTime)
updateJwkRef (Logger logger) manager url jwkRef = do
let options = wreqOptions manager []
urlT = T.pack $ show url
infoMsg = "refreshing JWK from endpoint: " <> urlT
liftIO $ logger $ JwkRefreshLog LevelInfo infoMsg Nothing
res <- liftIO $ try $ Wreq.getWith options $ show url
resp <- either logAndThrowHttp return res
let status = resp ^. Wreq.responseStatus
respBody = resp ^. Wreq.responseBody
when (status ^. Wreq.statusCode /= 200) $ do
let urlT = T.pack $ show url
respBodyT = Just $ CS.cs respBody
errMsg = "non-200 response on fetching JWK from: " <> urlT
let respBodyT = Just $ CS.cs respBody
errMsg = "Non-200 response on fetching JWK from: " <> urlT
httpErr = Just (JwkRefreshHttpError (Just status) urlT Nothing respBodyT)
logAndThrow errMsg httpErr
jwkset <- either (\e -> logAndThrow (T.pack e) Nothing) return . A.eitherDecode $ respBody
let parseErr e = "Error parsing JWK from url (" <> urlT <> "): " <> T.pack e
jwkset <- either (\e -> logAndThrow (parseErr e) Nothing) return $
A.eitherDecode respBody
liftIO $ modifyIORef jwkRef (const jwkset)
let mExpiresT = resp ^? Wreq.responseHeader "Expires"
@ -153,14 +157,14 @@ updateJwkRef (Logger logger) manager url jwkRef = do
where
logAndThrow :: (MonadIO m, MonadError T.Text m) => T.Text -> Maybe JwkRefreshHttpError -> m a
logAndThrow err httpErr = do
liftIO $ logger $ mkJwkRefreshLog err httpErr
liftIO $ logger $ JwkRefreshLog (LevelOther "critical") err httpErr
throwError err
logAndThrowHttp :: (MonadIO m, MonadError T.Text m) => HTTP.HttpException -> m a
logAndThrowHttp err = do
let httpErr = JwkRefreshHttpError Nothing (T.pack $ show url)
(Just $ HttpException err) Nothing
errMsg = "error fetching JWK: " <> T.pack (show err)
errMsg = "Error fetching JWK: " <> T.pack (show err)
logAndThrow errMsg (Just httpErr)
timeFmt = "%a, %d %b %Y %T GMT"
@ -334,6 +338,19 @@ verifyJwt ctx (RawJWT rawJWT) = do
config = defaultJWTValidationSettings audCheck
instance A.ToJSON JWTConfig where
toJSON (JWTConfig ty keyOrUrl claimNs aud claimsFmt) =
case keyOrUrl of
Left _ -> mkObj ("key" A..= A.String "<JWK REDACTED>")
Right url -> mkObj ("jwk_url" A..= url)
where
mkObj item = A.object [ "type" A..= ty
, "claims_namespace" A..= claimNs
, "claims_format" A..= claimsFmt
, "audience" A..= aud
, item
]
-- | Parse from a json string like:
-- | `{"type": "RS256", "key": "<PEM-encoded-public-key-or-X509-cert>"}`
-- | to JWTConfig

View File

@ -8,7 +8,8 @@ module Hasura.Server.Auth.JWT.Logging
import Data.Aeson
import Hasura.HTTP
import Hasura.Logging (LogLevel (..), ToEngineLog (..))
import Hasura.Logging (EngineLogType (..), LogLevel (..),
ToEngineLog (..))
import Hasura.Prelude
import Hasura.Server.Logging ()
@ -47,7 +48,7 @@ instance ToJSON JwkRefreshLog where
instance ToEngineLog JwkRefreshLog where
toEngineLog jwkRefreshLog =
(jrlLogLevel jwkRefreshLog, "jwk-refresh-log", toJSON jwkRefreshLog)
(jrlLogLevel jwkRefreshLog, ELTJwkRefreshLog, toJSON jwkRefreshLog)
mkJwkRefreshLog :: T.Text -> Maybe JwkRefreshHttpError -> JwkRefreshLog
mkJwkRefreshLog = JwkRefreshLog (LevelOther "critical")
mkJwkRefreshLog :: LogLevel -> T.Text -> Maybe JwkRefreshHttpError -> JwkRefreshLog
mkJwkRefreshLog = JwkRefreshLog

View File

@ -30,7 +30,7 @@ newtype UpdateInfo
$(A.deriveJSON (A.aesonDrop 2 A.snakeCase) ''UpdateInfo)
checkForUpdates :: LoggerCtx -> H.Manager -> IO ()
checkForUpdates (LoggerCtx loggerSet _ _) manager = do
checkForUpdates (LoggerCtx loggerSet _ _ _) manager = do
let options = wreqOptions manager []
url <- getUrl
forever $ do

View File

@ -3,10 +3,14 @@ module Hasura.Server.Init where
import qualified Database.PG.Query as Q
import Options.Applicative
import Data.Char (toLower)
import Network.Wai.Handler.Warp (HostPreference)
import Options.Applicative
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy.Char8 as BLC
import qualified Data.HashSet as Set
import qualified Data.String as DataString
import qualified Data.Text as T
@ -23,14 +27,20 @@ import Hasura.Server.Auth
import Hasura.Server.Cors
import Hasura.Server.Logging
import Hasura.Server.Utils
import Network.Wai.Handler.Warp
newtype InstanceId
= InstanceId {getInstanceId :: T.Text}
deriving (Show, Eq, J.ToJSON, J.FromJSON)
= InstanceId { getInstanceId :: Text }
deriving (Show, Eq, J.ToJSON, J.FromJSON)
mkInstanceId :: IO InstanceId
mkInstanceId = (InstanceId . UUID.toText) <$> UUID.nextRandom
mkInstanceId = InstanceId . UUID.toText <$> UUID.nextRandom
data StartupTimeInfo
= StartupTimeInfo
{ _stiMessage :: !Text
, _stiTimeTaken :: !Double
}
$(J.deriveJSON (J.aesonDrop 4 J.snakeCase) ''StartupTimeInfo)
data RawConnParams
= RawConnParams
@ -50,7 +60,7 @@ data RawServeOptions
, rsoTxIso :: !(Maybe Q.TxIsolation)
, rsoAdminSecret :: !(Maybe AdminSecret)
, rsoAuthHook :: !RawAuthHook
, rsoJwtSecret :: !(Maybe Text)
, rsoJwtSecret :: !(Maybe JWTConfig)
, rsoUnAuthRole :: !(Maybe RoleName)
, rsoCorsConfig :: !(Maybe CorsConfig)
, rsoEnableConsole :: !Bool
@ -63,6 +73,8 @@ data RawServeOptions
, rsoMxBatchSize :: !(Maybe LQ.BatchSize)
, rsoFallbackRefetchInt :: !(Maybe LQ.RefetchInterval)
, rsoEnableAllowlist :: !Bool
, rsoEnabledLogTypes :: !(Maybe [L.EngineLogType])
, rsoLogLevel :: !(Maybe L.LogLevel)
} deriving (Show, Eq)
data ServeOptions
@ -73,7 +85,7 @@ data ServeOptions
, soTxIso :: !Q.TxIsolation
, soAdminSecret :: !(Maybe AdminSecret)
, soAuthHook :: !(Maybe AuthHook)
, soJwtSecret :: !(Maybe Text)
, soJwtSecret :: !(Maybe JWTConfig)
, soUnAuthRole :: !(Maybe RoleName)
, soCorsConfig :: !CorsConfig
, soEnableConsole :: !Bool
@ -83,6 +95,8 @@ data ServeOptions
, soEnabledAPIs :: !(Set.HashSet API)
, soLiveQueryOpts :: !LQ.LQOpts
, soEnableAllowlist :: !Bool
, soEnabledLogTypes :: !(Set.HashSet L.EngineLogType)
, soLogLevel :: !L.LogLevel
} deriving (Show, Eq)
data RawConnInfo =
@ -112,6 +126,8 @@ data API
| DEVELOPER
| CONFIG
deriving (Show, Eq, Read, Generic)
$(J.deriveJSON (J.defaultOptions { J.constructorTagModifier = map toLower })
''API)
instance Hashable API
@ -171,6 +187,15 @@ instance FromEnv LQ.BatchSize where
instance FromEnv LQ.RefetchInterval where
fromEnv = fmap LQ.refetchIntervalFromMilli . readEither
instance FromEnv JWTConfig where
fromEnv = readJson
instance FromEnv [L.EngineLogType] where
fromEnv = readLogTypes
instance FromEnv L.LogLevel where
fromEnv = readLogLevel
parseStrAsBool :: String -> Either String Bool
parseStrAsBool t
| map toLower t `elem` truthVals = Right True
@ -229,6 +254,10 @@ withEnvBool bVal envVar =
mEnvVal <- considerEnv envVar
maybe (return False) return mEnvVal
withEnvJwtConf :: Maybe JWTConfig -> String -> WithEnv (Maybe JWTConfig)
withEnvJwtConf jVal envVar =
maybe (considerEnv envVar) returnJust jVal
mkHGEOptions :: RawHGEOptions -> WithEnv HGEOptions
mkHGEOptions (HGEOptionsG rawConnInfo rawCmd) =
HGEOptionsG <$> connInfo <*> cmd
@ -260,11 +289,10 @@ mkServeOptions rso = do
withEnv (rsoHost rso) (fst serveHostEnv)
connParams <- mkConnParams $ rsoConnParams rso
txIso <- fromMaybe Q.ReadCommitted <$>
withEnv (rsoTxIso rso) (fst txIsoEnv)
txIso <- fromMaybe Q.ReadCommitted <$> withEnv (rsoTxIso rso) (fst txIsoEnv)
adminScrt <- withEnvs (rsoAdminSecret rso) $ map fst [adminSecretEnv, accessKeyEnv]
authHook <- mkAuthHook $ rsoAuthHook rso
jwtSecret <- withEnv (rsoJwtSecret rso) $ fst jwtSecretEnv
jwtSecret <- withEnvJwtConf (rsoJwtSecret rso) $ fst jwtSecretEnv
unAuthRole <- withEnv (rsoUnAuthRole rso) $ fst unAuthRoleEnv
corsCfg <- mkCorsConfig $ rsoCorsConfig rso
enableConsole <- withEnvBool (rsoEnableConsole rso) $
@ -277,9 +305,13 @@ mkServeOptions rso = do
withEnv (rsoEnabledAPIs rso) (fst enabledAPIsEnv)
lqOpts <- mkLQOpts
enableAL <- withEnvBool (rsoEnableAllowlist rso) $ fst enableAllowlistEnv
enabledLogs <- Set.fromList . fromMaybe (Set.toList L.defaultEnabledLogTypes) <$>
withEnv (rsoEnabledLogTypes rso) (fst enabledLogsEnv)
serverLogLevel <- fromMaybe L.LevelInfo <$> withEnv (rsoLogLevel rso) (fst logLevelEnv)
return $ ServeOptions port host connParams txIso adminScrt authHook jwtSecret
unAuthRole corsCfg enableConsole consoleAssetsDir
enableTelemetry strfyNum enabledAPIs lqOpts enableAL
enabledLogs serverLogLevel
where
#ifdef DeveloperAPIs
defaultAPIs = [METADATA,GRAPHQL,PGDUMP,CONFIG,DEVELOPER]
@ -405,13 +437,12 @@ serveCmdFooter =
envVarDoc = mkEnvVarDoc $ envVars <> eventEnvs
envVars =
[ databaseUrlEnv, retriesNumEnv, servePortEnv, serveHostEnv,
pgStripesEnv, pgConnsEnv, pgTimeoutEnv
, pgUsePrepareEnv, txIsoEnv, adminSecretEnv
, accessKeyEnv, authHookEnv, authHookModeEnv
[ databaseUrlEnv, retriesNumEnv, servePortEnv, serveHostEnv
, pgStripesEnv, pgConnsEnv, pgTimeoutEnv, pgUsePrepareEnv, txIsoEnv
, adminSecretEnv , accessKeyEnv, authHookEnv, authHookModeEnv
, jwtSecretEnv, unAuthRoleEnv, corsDomainEnv, enableConsoleEnv
, enableTelemetryEnv, wsReadCookieEnv, stringifyNumEnv, enabledAPIsEnv
, enableAllowlistEnv
, enableAllowlistEnv, enabledLogsEnv, logLevelEnv
]
eventEnvs =
@ -546,7 +577,7 @@ stringifyNumEnv =
enabledAPIsEnv :: (String, String)
enabledAPIsEnv =
( "HASURA_GRAPHQL_ENABLED_APIS"
, "List of comma separated list of allowed APIs. (default: metadata,graphql,pgdump,config)"
, "Comma separated list of enabled APIs. (default: metadata,graphql,pgdump,config)"
)
consoleAssetsDirEnv :: (String, String)
@ -557,6 +588,20 @@ consoleAssetsDirEnv =
++ " default docker image to disable loading assets from CDN."
)
enabledLogsEnv :: (String, String)
enabledLogsEnv =
( "HASURA_GRAPHQL_ENABLED_LOG_TYPES"
, "Comma separated list of enabled log types "
<> "(default: startup,http-log,webhook-log,websocket-log)"
<> "(all: startup,http-log,webhook-log,websocket-log,query-log)"
)
logLevelEnv :: (String, String)
logLevelEnv =
( "HASURA_GRAPHQL_LOG_LEVEL"
, "Server log level (default: info) (all: error, warn, info, debug)"
)
parseRawConnInfo :: Parser RawConnInfo
parseRawConnInfo =
RawConnInfo <$> host <*> port <*> user <*> password
@ -717,6 +762,30 @@ readAPIs = mapM readAPI . T.splitOn "," . T.pack
"CONFIG" -> Right CONFIG
_ -> Left "Only expecting list of comma separated API types metadata,graphql,pgdump,developer,config"
readLogTypes :: String -> Either String [L.EngineLogType]
readLogTypes = mapM readLogType . T.splitOn "," . T.pack
where readLogType si = case T.toLower $ T.strip si of
"startup" -> Right L.ELTStartup
"http-log" -> Right L.ELTHttpLog
"webhook-log" -> Right L.ELTWebhookLog
"websocket-log" -> Right L.ELTWebsocketLog
"query-log" -> Right L.ELTQueryLog
_ -> Left $ "Valid list of comma-separated log types: "
<> BLC.unpack (J.encode L.userAllowedLogTypes)
readLogLevel :: String -> Either String L.LogLevel
readLogLevel s = case T.toLower $ T.strip $ T.pack s of
"debug" -> Right L.LevelDebug
"info" -> Right L.LevelInfo
"warn" -> Right L.LevelWarn
"error" -> Right L.LevelError
_ -> Left "Valid log levels: debug, info, warn or error"
readJson :: (J.FromJSON a) => String -> Either String a
readJson = J.eitherDecodeStrict . txtToBs . T.pack
parseWebHook :: Parser RawAuthHook
parseWebHook =
AuthHookG <$> url <*> urlType
@ -733,14 +802,14 @@ parseWebHook =
help (snd authHookModeEnv)
)
parseJwtSecret :: Parser (Maybe Text)
parseJwtSecret :: Parser (Maybe JWTConfig)
parseJwtSecret =
optional $ strOption
( long "jwt-secret" <>
metavar "<JSON CONFIG>" <>
help (snd jwtSecretEnv)
)
optional $
option (eitherReader readJson)
( long "jwt-secret" <>
metavar "<JSON CONFIG>" <>
help (snd jwtSecretEnv)
)
jwtSecretHelp :: String
jwtSecretHelp = "The JSON containing type and the JWK used for verifying. e.g: "
@ -838,15 +907,15 @@ parseEnableAllowlist =
mxRefetchDelayEnv :: (String, String)
mxRefetchDelayEnv =
( "HASURA_GRAPHQL_LIVE_QUERIES_MULTIPLEXED_REFETCH_INTERVAL"
, "results will only be sent once in this interval (in milliseconds) for \\
\live queries which can be multiplexed. Default: 1000 (1sec)"
, "results will only be sent once in this interval (in milliseconds) for "
<> "live queries which can be multiplexed. Default: 1000 (1sec)"
)
mxBatchSizeEnv :: (String, String)
mxBatchSizeEnv =
( "HASURA_GRAPHQL_LIVE_QUERIES_MULTIPLEXED_BATCH_SIZE"
, "multiplexed live queries are split into batches of the specified \\
\size. Default 100. "
, "multiplexed live queries are split into batches of the specified "
<> "size. Default 100. "
)
enableAllowlistEnv :: (String, String)
@ -867,10 +936,24 @@ parseFallbackRefetchInt =
fallbackRefetchDelayEnv :: (String, String)
fallbackRefetchDelayEnv =
( "HASURA_GRAPHQL_LIVE_QUERIES_FALLBACK_REFETCH_INTERVAL"
, "results will only be sent once in this interval (in milliseconds) for \\
\live queries which cannot be multiplexed. Default: 1000 (1sec)"
, "results will only be sent once in this interval (in milliseconds) for "
<> "live queries which cannot be multiplexed. Default: 1000 (1sec)"
)
parseEnabledLogs :: Parser (Maybe [L.EngineLogType])
parseEnabledLogs = optional $
option (eitherReader readLogTypes)
( long "enabled-log-types" <>
help (snd enabledLogsEnv)
)
parseLogLevel :: Parser (Maybe L.LogLevel)
parseLogLevel = optional $
option (eitherReader readLogLevel)
( long "log-level" <>
help (snd logLevelEnv)
)
-- Init logging related
connInfoToLog :: Q.ConnInfo -> StartupLog
connInfoToLog (Q.ConnInfo host port user _ db _ retries) =
@ -885,12 +968,15 @@ connInfoToLog (Q.ConnInfo host port user _ db _ retries) =
serveOptsToLog :: ServeOptions -> StartupLog
serveOptsToLog so =
StartupLog L.LevelInfo "serve_options" infoVal
StartupLog L.LevelInfo "server_configuration" infoVal
where
infoVal = J.object [ "port" J..= soPort so
, "server_host" J..= show (soHost so)
, "transaction_isolation" J..= show (soTxIso so)
, "admin_secret_set" J..= isJust (soAdminSecret so)
, "auth_hook" J..= (ahUrl <$> soAuthHook so)
, "auth_hook_mode" J..= (show . ahType <$> soAuthHook so)
, "jwt_secret" J..= (J.toJSON <$> soJwtSecret so)
, "unauth_role" J..= soUnAuthRole so
, "cors_config" J..= soCorsConfig so
, "enable_console" J..= soEnableConsole so
@ -898,12 +984,20 @@ serveOptsToLog so =
, "enable_telemetry" J..= soEnableTelemetry so
, "use_prepared_statements" J..= (Q.cpAllowPrepare . soConnParams) so
, "stringify_numeric_types" J..= soStringifyNum so
, "enabled_apis" J..= soEnabledAPIs so
, "live_query_options" J..= soLiveQueryOpts so
, "enable_allowlist" J..= soEnableAllowlist so
, "enabled_log_types" J..= soEnabledLogTypes so
, "log_level" J..= soLogLevel so
]
mkGenericStrLog :: T.Text -> String -> StartupLog
mkGenericStrLog k msg =
StartupLog L.LevelInfo k $ J.toJSON msg
-- | Build a startup log entry whose payload is a plain string message.
mkGenericStrLog :: L.LogLevel -> T.Text -> String -> StartupLog
mkGenericStrLog logLevel k = StartupLog logLevel k . J.toJSON
-- | Build a startup log entry from any JSON-serialisable payload.
mkGenericLog :: (J.ToJSON a) => L.LogLevel -> Text -> a -> StartupLog
mkGenericLog logLevel k = StartupLog logLevel k . J.toJSON
inconsistentMetadataLog :: SchemaCache -> StartupLog
inconsistentMetadataLog sc =

View File

@ -4,42 +4,40 @@ module Hasura.Server.Logging
( StartupLog(..)
, PGLog(..)
, mkInconsMetadataLog
, mkAccessLog
, getRequestHeader
, mkHttpAccessLog
, mkHttpErrorLog
, WebHookLog(..)
, WebHookLogger
, HttpException
) where
import Crypto.Hash (Digest, SHA1, hash)
import Data.Aeson
import Data.Bits (shift, (.&.))
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Lazy as BL
import Data.Int (Int64)
import Data.List (find)
import qualified Data.TByteString as TBS
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Bits (shift, (.&.))
import Data.ByteString.Char8 (ByteString)
import Data.Int (Int64)
import Data.List (find)
import Data.Time.Clock
import Data.Word (Word32)
import Network.Socket (SockAddr (..))
import Network.Wai (Request (..))
import System.ByteOrder (ByteOrder (..), byteOrder)
import Text.Printf (printf)
import Data.Word (Word32)
import Network.Socket (SockAddr (..))
import System.ByteOrder (ByteOrder (..), byteOrder)
import Text.Printf (printf)
import qualified Data.ByteString.Char8 as BS
import qualified Data.CaseInsensitive as CI
import qualified Network.HTTP.Types as N
import qualified Data.ByteString.Char8 as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Network.HTTP.Types as N
import qualified Network.Wai as Wai
import Hasura.HTTP
import qualified Hasura.Logging as L
import Hasura.Logging (EngineLogType (..))
import Hasura.Prelude
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Metadata
import Hasura.RQL.Types.Permission
import Hasura.RQL.Types
import Hasura.Server.Utils
import qualified Hasura.Logging as L
data StartupLog
= StartupLog
{ slLogLevel :: !L.LogLevel
@ -55,7 +53,7 @@ instance ToJSON StartupLog where
instance L.ToEngineLog StartupLog where
toEngineLog startupLog =
(slLogLevel startupLog, "startup", toJSON startupLog)
(slLogLevel startupLog, ELTStartup, toJSON startupLog)
data PGLog
= PGLog
@ -69,7 +67,7 @@ instance ToJSON PGLog where
instance L.ToEngineLog PGLog where
toEngineLog pgLog =
(plLogLevel pgLog, "pg-client", toJSON pgLog)
(plLogLevel pgLog, ELTPgClient, toJSON pgLog)
data MetadataLog
= MetadataLog
@ -86,7 +84,7 @@ instance ToJSON MetadataLog where
instance L.ToEngineLog MetadataLog where
toEngineLog ml =
(mlLogLevel ml, "metadata", toJSON ml)
(mlLogLevel ml, ELTMetadata, toJSON ml)
mkInconsMetadataLog :: [InconsistentMetadataObj] -> MetadataLog
mkInconsMetadataLog objs =
@ -105,7 +103,7 @@ data WebHookLog
instance L.ToEngineLog WebHookLog where
toEngineLog webHookLog =
(whlLogLevel webHookLog, "webhook-log", toJSON webHookLog)
(whlLogLevel webHookLog, ELTWebhookLog, toJSON webHookLog)
instance ToJSON WebHookLog where
toJSON whl =
@ -118,118 +116,128 @@ instance ToJSON WebHookLog where
type WebHookLogger = WebHookLog -> IO ()
data AccessLog
= AccessLog
{ alStatus :: !N.Status
, alMethod :: !T.Text
, alSource :: !T.Text
, alPath :: !T.Text
, alHttpVersion :: !N.HttpVersion
, alDetail :: !(Maybe Value)
, alRequestId :: !(Maybe T.Text)
, alHasuraUser :: !(Maybe UserVars)
, alQueryHash :: !(Maybe T.Text)
, alResponseSize :: !(Maybe Int64)
, alResponseTime :: !(Maybe Double)
-- | Log information about the HTTP request
data HttpInfoLog
= HttpInfoLog
{ hlStatus :: !N.Status
, hlMethod :: !T.Text
, hlSource :: !T.Text
, hlPath :: !T.Text
, hlHttpVersion :: !N.HttpVersion
} deriving (Show, Eq)
instance L.ToEngineLog AccessLog where
toEngineLog accessLog =
(L.LevelInfo, "http-log", toJSON accessLog)
instance ToJSON AccessLog where
toJSON (AccessLog st met src path hv det reqId hUser qh rs rt) =
instance ToJSON HttpInfoLog where
toJSON (HttpInfoLog st met src path hv) =
object [ "status" .= N.statusCode st
, "method" .= met
, "ip" .= src
, "url" .= path
, "http_version" .= show hv
, "detail" .= det
, "request_id" .= reqId
, "user" .= hUser
, "query_hash" .= qh
, "response_size" .= rs
, "query_execution_time" .= rt
]
data LogDetail
= LogDetail
{ _ldQuery :: !TBS.TByteString
, _ldError :: !Value
-- | Information about a GraphQL/Hasura metadata operation over HTTP
data OperationLog
= OperationLog
{ olRequestId :: !RequestId
, olUserVars :: !(Maybe UserVars)
, olResponseSize :: !(Maybe Int64)
, olQueryExecutionTime :: !(Maybe Double)
, olQuery :: !(Maybe Value)
, olError :: !(Maybe Value)
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 2 snakeCase) ''OperationLog)
instance ToJSON LogDetail where
toJSON (LogDetail q e) =
object [ "request" .= q
, "error" .= e
]
data HttpAccessLog
= HttpAccessLog
{ halHttpInfo :: !HttpInfoLog
, halOperation :: !OperationLog
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 3 snakeCase) ''HttpAccessLog)
ravenLogGen
:: (BL.ByteString, Either QErr BL.ByteString)
-> (N.Status, Maybe Value, Maybe T.Text, Maybe Int64)
ravenLogGen (reqBody, res) =
(status, toJSON <$> logDetail, Just qh, Just size)
where
status = either qeStatus (const N.status200) res
logDetail = either (Just . qErrToLogDetail) (const Nothing) res
reqBodyTxt = TBS.fromLBS reqBody
qErrToLogDetail qErr =
LogDetail reqBodyTxt $ toJSON qErr
size = BL.length $ either encode id res
qh = T.pack . show $ sha1 reqBody
sha1 :: BL.ByteString -> Digest SHA1
sha1 = hash . BL.toStrict
mkAccessLog
:: Maybe UserInfo -- may not have been resolved
-> Request
-> (BL.ByteString, Either QErr BL.ByteString)
-> Maybe (UTCTime, UTCTime)
-> AccessLog
mkAccessLog userInfoM req r mTimeT =
AccessLog
{ alStatus = status
, alMethod = bsToTxt $ requestMethod req
, alSource = bsToTxt $ getSourceFromFallback req
, alPath = bsToTxt $ rawPathInfo req
, alHttpVersion = httpVersion req
, alDetail = mDetail
, alRequestId = bsToTxt <$> getRequestId req
, alHasuraUser = userVars <$> userInfoM
, alResponseSize = size
, alResponseTime = realToFrac <$> diffTime
, alQueryHash = queryHash
-- | An HTTP access-log payload paired with the severity it should be
-- emitted at ('L.LevelInfo' from 'mkHttpAccessLog', 'L.LevelError' from
-- 'mkHttpErrorLog').
data HttpLog
  = HttpLog
  { _hlLogLevel :: !L.LogLevel
  , _hlLogLing  :: !HttpAccessLog
  -- ^ NOTE(review): field name looks like a typo for @_hlLogLine@ /
  -- @_hlLog@ — confirm the accessor is unused elsewhere before renaming.
  }

instance L.ToEngineLog HttpLog where
  toEngineLog (HttpLog logLevel accessLog) =
    (logLevel, ELTHttpLog, toJSON accessLog)
-- | Build the log entry for a successfully served HTTP request.
-- The status is hard-coded to 200 here; error responses are logged via
-- 'mkHttpErrorLog' instead. The query itself is not included
-- ('olQuery' = 'Nothing') — query logging is a separate log type.
mkHttpAccessLog
  :: Maybe UserInfo -- may not have been resolved
  -> RequestId
  -> Wai.Request
  -> BL.ByteString
  -> Maybe (UTCTime, UTCTime) -- ^ request receive / response send times, if measured
  -> HttpLog
mkHttpAccessLog userInfoM reqId req res mTimeT =
  let http = HttpInfoLog
             { hlStatus = status
             , hlMethod = bsToTxt $ Wai.requestMethod req
             , hlSource = bsToTxt $ getSourceFromFallback req
             , hlPath = bsToTxt $ Wai.rawPathInfo req
             , hlHttpVersion = Wai.httpVersion req
             }
      op = OperationLog
           { olRequestId = reqId
           , olUserVars = userVars <$> userInfoM
           , olResponseSize = respSize
           , olQueryExecutionTime = respTime
           , olQuery = Nothing
           , olError = Nothing
           }
  in HttpLog L.LevelInfo $ HttpAccessLog http op
  where
    status = N.status200
    -- size of the response body actually sent back
    respSize = Just $ BL.length res
    respTime = computeTimeDiff mTimeT
getSourceFromSocket :: Request -> ByteString
getSourceFromSocket = BS.pack . showSockAddr . remoteHost
-- | Build the log entry for a failed HTTP request, logged at error level.
-- Unlike 'mkHttpAccessLog', the failing query (when available) and the
-- error value are embedded in the operation log for diagnosis.
mkHttpErrorLog
  :: Maybe UserInfo -- may not have been resolved
  -> RequestId
  -> Wai.Request
  -> QErr
  -> Maybe Value -- ^ the request's query, when it could be parsed
  -> Maybe (UTCTime, UTCTime)
  -> HttpLog
mkHttpErrorLog userInfoM reqId req err query mTimeT =
  let http = HttpInfoLog
             { hlStatus = status
             , hlMethod = bsToTxt $ Wai.requestMethod req
             , hlSource = bsToTxt $ getSourceFromFallback req
             , hlPath = bsToTxt $ Wai.rawPathInfo req
             , hlHttpVersion = Wai.httpVersion req
             }
      op = OperationLog
           { olRequestId = reqId
           , olUserVars = userVars <$> userInfoM
           , olResponseSize = respSize
           , olQueryExecutionTime = respTime
           , olQuery = toJSON <$> query
           , olError = Just $ toJSON err
           }
  in HttpLog L.LevelError $ HttpAccessLog http op
  where
    -- HTTP status comes from the error itself
    status = qeStatus err
    -- the body sent to the client is the encoded error
    respSize = Just $ BL.length $ encode err
    respTime = computeTimeDiff mTimeT
getSourceFromFallback :: Request -> ByteString
-- | Elapsed seconds between the two timestamps (start, end), when present.
computeTimeDiff :: Maybe (UTCTime, UTCTime) -> Maybe Double
computeTimeDiff Nothing         = Nothing
computeTimeDiff (Just (t1, t2)) = Just $ realToFrac $ diffUTCTime t2 t1
-- | The peer address of the underlying socket, rendered as a string.
getSourceFromSocket :: Wai.Request -> ByteString
getSourceFromSocket req = BS.pack $ showSockAddr $ Wai.remoteHost req
-- | Client address: prefer the proxy-advertised header, otherwise fall
-- back to the socket's peer address.
getSourceFromFallback :: Wai.Request -> ByteString
getSourceFromFallback req =
  case getSource req of
    Just addr -> addr
    Nothing   -> getSourceFromSocket req
getSource :: Request -> Maybe ByteString
-- | The client address as advertised by a reverse proxy, if any header
-- carries it.
getSource :: Wai.Request -> Maybe ByteString
getSource req =
  snd <$> find isAddrHeader (Wai.requestHeaders req)
  where
    isAddrHeader (name, _) = name `elem` ["x-real-ip", "x-forwarded-for"]
requestIdHeader :: T.Text
requestIdHeader = "x-request-id"
getRequestId :: Request -> Maybe ByteString
getRequestId = getRequestHeader $ TE.encodeUtf8 requestIdHeader
getRequestHeader :: ByteString -> Request -> Maybe ByteString
getRequestHeader hdrName req = snd <$> mHeader
where
mHeader = find (\h -> fst h == CI.mk hdrName) hdrs
hdrs = requestHeaders req
hdrs = Wai.requestHeaders req
-- | A type for IP address in numeric string representation.
type NumericAddress = String

View File

@ -1,23 +1,23 @@
module Hasura.Server.Middleware where
import Data.Maybe (fromMaybe)
import Data.Maybe (fromMaybe)
import Network.Wai
import Control.Applicative
import Hasura.Prelude
import Hasura.Server.Cors
import Hasura.Server.Logging (getRequestHeader)
import Hasura.Server.Utils
import qualified Data.ByteString as B
import qualified Data.CaseInsensitive as CI
import qualified Data.Text.Encoding as TE
import qualified Network.HTTP.Types as H
import qualified Data.ByteString as B
import qualified Data.CaseInsensitive as CI
import qualified Data.Text.Encoding as TE
import qualified Network.HTTP.Types as H
corsMiddleware :: CorsPolicy -> Middleware
corsMiddleware policy app req sendResp =
maybe (app req sendResp) handleCors $ getRequestHeader "Origin" req
corsMiddleware policy app req sendResp = do
let origin = getRequestHeader "Origin" $ requestHeaders req
maybe (app req sendResp) handleCors origin
where
handleCors origin = case cpConfig policy of
@ -44,7 +44,8 @@ corsMiddleware policy app req sendResp =
emptyResponse = responseLBS H.status204 [] ""
requestedHeaders =
fromMaybe "" $ getRequestHeader "Access-Control-Request-Headers" req
fromMaybe "" $ getRequestHeader "Access-Control-Request-Headers" $
requestHeaders req
injectCorsHeaders :: B.ByteString -> Response -> Response
injectCorsHeaders origin = setHeaders (mkCorsHeaders origin)

View File

@ -52,7 +52,7 @@ instance ToJSON SchemaSyncThreadLog where
instance ToEngineLog SchemaSyncThreadLog where
toEngineLog threadLog =
(suelLogLevel threadLog, "schema_sync_thread", toJSON threadLog)
(suelLogLevel threadLog, ELTSchemaSyncThread, toJSON threadLog)
data EventPayload
= EventPayload
@ -98,7 +98,7 @@ startSchemaSync sqlGenCtx pool logger httpMgr cacheRef instanceId cacheInitTime
logThreadStarted threadType threadId =
let msg = T.pack (show threadType) <> " thread started"
in unLogger logger $
StartupLog LevelInfo "threads" $
StartupLog LevelInfo "schema-sync" $
object [ "instance_id" .= getInstanceId instanceId
, "thread_id" .= show threadId
, "message" .= msg

View File

@ -6,7 +6,6 @@
module Hasura.Server.Telemetry
( runTelemetry
, getDbId
, generateFingerprint
, mkTelemetryLog
)
where
@ -31,8 +30,6 @@ import qualified Data.ByteString.Lazy as BL
import qualified Data.HashMap.Strict as Map
import qualified Data.String.Conversions as CS
import qualified Data.Text as T
import qualified Data.UUID as UUID
import qualified Data.UUID.V4 as UUID
import qualified Database.PG.Query as Q
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
@ -163,9 +160,6 @@ computeMetrics sc =
permsOfTbl = Map.toList . tiRolePermInfoMap
generateFingerprint :: IO Text
generateFingerprint = UUID.toText <$> UUID.nextRandom
getDbId :: Q.TxE QErr Text
getDbId =
(runIdentity . Q.getRow) <$>
@ -210,7 +204,7 @@ instance A.ToJSON TelemetryHttpError where
instance ToEngineLog TelemetryLog where
toEngineLog tl = (_tlLogLevel tl, "telemetry-log", A.toJSON tl)
toEngineLog tl = (_tlLogLevel tl, ELTTelemetryLog, A.toJSON tl)
mkHttpError
:: Text

View File

@ -3,6 +3,8 @@ module Hasura.Server.Utils where
import qualified Database.PG.Query.Connection as Q
import Data.Aeson
import Data.Char
import Data.List (find)
import Data.List.Split
import Data.Time.Clock
import Network.URI
@ -11,11 +13,13 @@ import System.Exit
import System.Process
import qualified Data.ByteString as B
import qualified Data.CaseInsensitive as CI
import qualified Data.HashSet as Set
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified Data.Text.IO as TI
import qualified Data.UUID as UUID
import qualified Data.UUID.V4 as UUID
import qualified Language.Haskell.TH.Syntax as TH
import qualified Network.HTTP.Client as HC
import qualified Network.HTTP.Types as HTTP
@ -25,6 +29,10 @@ import qualified Text.Regex.TDFA.ByteString as TDFA
import Hasura.Prelude
newtype RequestId
= RequestId { unRequestId :: Text }
deriving (Show, Eq, ToJSON, FromJSON)
jsonHeader :: (T.Text, T.Text)
jsonHeader = ("Content-Type", "application/json; charset=utf-8")
@ -49,11 +57,20 @@ adminSecretHeader = "x-hasura-admin-secret"
userIdHeader :: T.Text
userIdHeader = "x-hasura-user-id"
bsToTxt :: B.ByteString -> T.Text
bsToTxt = TE.decodeUtf8With TE.lenientDecode
requestIdHeader :: T.Text
requestIdHeader = "x-request-id"
txtToBs :: T.Text -> B.ByteString
txtToBs = TE.encodeUtf8
-- | Look up a header (case-insensitively) in a header list.
getRequestHeader :: B.ByteString -> [HTTP.Header] -> Maybe B.ByteString
getRequestHeader hdrName hdrs = fmap snd $ find matches hdrs
  where
    matches (name, _) = name == CI.mk hdrName
-- | Use the client-supplied @x-request-id@ header when present; otherwise
-- mint a fresh id so every request is traceable in the logs.
getRequestId :: (MonadIO m) => [HTTP.Header] -> m RequestId
getRequestId headers =
  maybe (RequestId <$> liftIO generateFingerprint)
        (return . RequestId . bsToTxt)
        (getRequestHeader (txtToBs requestIdHeader) headers)
-- Parsing postgres database url
-- from: https://github.com/futurice/postgresql-simple-url/
@ -167,6 +184,9 @@ diffTimeToMicro diff =
where
aSecond = 1000 * 1000
-- | A freshly generated random UUID rendered as text; used for telemetry
-- fingerprints and server-generated request ids.
generateFingerprint :: IO Text
generateFingerprint = fmap UUID.toText UUID.nextRandom
-- json representation of HTTP exception
httpExceptToJSON :: HC.HttpException -> Value
httpExceptToJSON e = case e of
@ -219,3 +239,15 @@ filterResponseHeaders =
filterHeaders :: Set.HashSet HTTP.HeaderName -> [HTTP.Header] -> [HTTP.Header]
filterHeaders list = filter (\(n, _) -> not $ n `Set.member` list)
-- | Convert a CamelCase string to hyphen-case, e.g. @"httpLog" -> "http-log"@.
-- NOTE(review): after an uppercase letter the recursion re-enters
-- 'hyphenate' (which lowercases the next head) rather than 'u', so only
-- the first letter of a consecutive-uppercase run gets a hyphen
-- (@"ABCd" -> "a-bcd"@, not @"a-b-cd"@) — presumably intentional for
-- acronyms; confirm before changing.
hyphenate :: String -> String
hyphenate = u . applyFirst toLower
  where u [] = []
        u (x:xs) | isUpper x = '-' : toLower x : hyphenate xs
                 | otherwise = x : u xs

-- | Apply a function to the first character of a string only.
applyFirst :: (Char -> Char) -> String -> String
applyFirst _ []     = []
applyFirst f [x]    = [f x]
applyFirst f (x:xs) = f x: xs

View File

@ -73,6 +73,15 @@ def pytest_addoption(parser):
help="Run Test cases with allowlist queries enabled"
)
parser.addoption(
"--test-logging",
action="store_true",
default=False,
required=False,
help="Run testcases for logging"
)
#By default,
#1) Set default parallelism to one
#2) Set test grouping to by filename (--dist=loadfile)
@ -140,7 +149,7 @@ def hge_ctx(request):
hge_jwt_conf=hge_jwt_conf,
ws_read_cookie=ws_read_cookie,
metadata_disabled=metadata_disabled,
hge_scale_url=hge_scale_url
hge_scale_url=hge_scale_url,
)
except HGECtxError as e:
pytest.exit(str(e))

View File

@ -228,7 +228,8 @@ class HGECtxGQLServer:
class HGECtx:
def __init__(self, hge_url, pg_url, hge_key, hge_webhook, webhook_insecure,
hge_jwt_key_file, hge_jwt_conf, metadata_disabled, ws_read_cookie, hge_scale_url):
hge_jwt_key_file, hge_jwt_conf, metadata_disabled,
ws_read_cookie, hge_scale_url):
self.http = requests.Session()
self.hge_key = hge_key

View File

@ -186,13 +186,12 @@ class SampleAuthGraphQL(RequestHandler):
def post(self, request):
if not request.json:
return Response(HTTPStatus.BAD_REQUEST)
res = hello_schema.execute(request.json['query'])
res = sample_auth_schema.execute(request.json['query'])
resp = mkJSONResp(res)
resp.headers['Set-Cookie'] = 'abcd'
resp.headers['Custom-Header'] = 'custom-value'
return resp
# GraphQL server with interfaces
class Character(graphene.Interface):

View File

@ -0,0 +1,21 @@
# Fixture for the server logging tests (test_logging.py): creates and
# tracks a small "hello" table seeded with one row, so a GraphQL query
# against it produces http-log / query-log entries to assert on.
type: bulk
args:
- type: run_sql
  args:
    sql: |
      CREATE TABLE hello (
          id SERIAL PRIMARY KEY,
          code TEXT,
          name TEXT
      );
- type: run_sql
  args:
    sql: |
      INSERT INTO hello VALUES
      (1, 'abc', 'Some data');
- type: track_table
  args:
    schema: public
    name: hello

View File

@ -0,0 +1,6 @@
# Teardown for the server logging tests: drops the "hello" table created
# by setup.yaml.
type: bulk
args:
- type: run_sql
  args:
    sql: |
      drop table hello

View File

@ -0,0 +1,117 @@
#!/usr/bin/env python3
import json
import os
import time
import pytest
if not pytest.config.getoption("--test-logging"):
pytest.skip("--test-logging missing, skipping tests", allow_module_level=True)
class TestLogging:
    """Integration tests for the server's structured JSON logs.

    Assumes the server was started with verbose log types enabled and its
    stdout redirected to the file named by the LOGGING_TEST_LOGFILE_PATH
    environment variable (see the CI test script).
    """
    dir = 'queries/logging'

    def _teardown(self, hge_ctx):
        # drop the fixture table created in transact()
        st_code, resp = hge_ctx.v1q_f(self.dir + '/teardown.yaml')
        assert st_code == 200, resp

    @pytest.fixture(autouse=True)
    def transact(self, hge_ctx):
        """Create the fixture table, fire one GraphQL query so the server
        emits http/query logs, then read the captured log file."""
        # setup some tables
        st_code, resp = hge_ctx.v1q_f(self.dir + '/setup.yaml')
        assert st_code == 200, resp

        q = {'query': 'query { hello {code name} }'}
        headers = {}
        if hge_ctx.hge_key:
            headers['x-hasura-admin-secret'] = hge_ctx.hge_key
        resp = hge_ctx.http.post(hge_ctx.hge_url + '/v1/graphql', json=q,
                                 headers=headers)
        assert resp.status_code == 200 and 'data' in resp.json()

        # the log might take time to buffer/flush, so wait *before* reading
        # the log file (waiting after parsing would be pointless)
        time.sleep(2)
        self.logs = self._parse_logs(hge_ctx)
        yield
        self._teardown(hge_ctx)

    def _parse_logs(self, hge_ctx):
        """Parse the server's log file into a list of JSON objects."""
        log_file = os.getenv('LOGGING_TEST_LOGFILE_PATH', None)
        if not log_file:
            print('Could not determine log file path to test logging!')
            # force teardown if setup fails
            self._teardown(hge_ctx)
            assert False
        with open(log_file, 'r') as f:
            loglines = f.readlines()
        logs = [json.loads(line.strip()) for line in loglines]
        assert len(logs) > 0
        return logs

    def test_startup_logs(self, hge_ctx):
        """The server should emit exactly one server_configuration startup log."""
        def _get_server_config(x):
            return x['type'] == 'startup' and \
                'kind' in x['detail'] and \
                x['detail']['kind'] == 'server_configuration'

        config_logs = list(filter(_get_server_config, self.logs))
        print(config_logs)
        assert len(config_logs) == 1
        config_log = config_logs[0]
        print(config_log)
        info = config_log['detail']['info']
        # we can improve this later by checking the actual value and
        # cross-checking with current config of the server
        assert 'enable_console' in info
        assert 'port' in info
        assert 'server_host' in info
        #assert 'transaction_isolation' in info
        assert 'admin_secret_set' in info
        if hge_ctx.hge_key:
            assert info['admin_secret_set'] == True
        assert 'auth_hook' in info
        assert 'auth_hook_mode' in info
        assert 'jwt_secret' in info
        assert 'unauth_role' in info
        assert 'cors_config' in info
        assert 'enable_console' in info
        assert 'console_assets_dir' in info
        assert 'enable_telemetry' in info
        assert 'use_prepared_statements' in info
        assert 'stringify_numeric_types' in info
        assert 'enabled_apis' in info
        assert 'live_query_options' in info
        assert 'enable_allowlist' in info
        assert 'enabled_log_types' in info

    def test_http_log(self, hge_ctx):
        """http-log entries should carry http_info and operation details."""
        def _get_http_logs(x):
            return x['type'] == 'http-log'

        print('all logs gathered', self.logs)
        http_logs = list(filter(_get_http_logs, self.logs))
        print('http logs', http_logs)
        assert len(http_logs) > 0
        http_info = http_logs[0]['detail']['http_info']
        assert 'url' in http_info
        assert 'status' in http_info
        assert 'ip' in http_info
        print(http_logs)
        operation = http_logs[0]['detail']['operation']
        assert 'query_execution_time' in operation
        assert 'user_vars' in operation
        assert 'request_id' in operation

    def test_query_log(self, hge_ctx):
        """query-log entries should carry the query and its generated SQL."""
        def _get_query_logs(x):
            return x['type'] == 'query-log'

        query_logs = list(filter(_get_query_logs, self.logs))
        assert len(query_logs) > 0
        onelog = query_logs[0]['detail']
        assert 'request_id' in onelog
        assert 'query' in onelog
        assert 'query' in onelog['query']
        assert 'generated_sql' in onelog

View File

@ -395,7 +395,6 @@ class TestRemoteSchemaResponseHeaders():
resp = hge_ctx.http.post(hge_ctx.hge_url + '/v1/graphql', json=q,
headers=headers)
assert resp.status_code == 200
print(resp.headers)
assert ('Set-Cookie' in resp.headers and
resp.headers['Set-Cookie'] == 'abcd')
res = resp.json()