mirror of https://github.com/hasura/graphql-engine.git
commit 8a0615ff70 (parent 9bd5826020)
@@ -8,12 +8,14 @@ RUN go get github.com/golang/dep/cmd/dep \
     && go get github.com/hasura/go-bindata/go-bindata \
     && go get github.com/tcnksm/ghr

-# install UPX and netcat
+# install UPX, netcat and brotli
 RUN apt-get update && apt-get install -y \
-    xz-utils netcat libpq5 postgresql-client \
+    xz-utils netcat libpq5 postgresql-client git cmake pkgconf \
     && curl -Lo /tmp/upx-${upx_version}.tar.xz https://github.com/upx/upx/releases/download/v${upx_version}/upx-${upx_version}-amd64_linux.tar.xz \
     && xz -d -c /tmp/upx-${upx_version}.tar.xz \
     | tar -xOf - upx-${upx_version}-amd64_linux/upx > /bin/upx \
     && chmod a+x /bin/upx \
+    && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \
+    && make && make test && make install && ldconfig \
     && apt-get -y auto-remove \
     && rm -rf /var/lib/apt/lists/*
@@ -142,7 +142,7 @@ jobs:
   # changes only contains files in .ciignore
   check_build_worthiness:
     docker:
-      - image: hasura/graphql-engine-cli-builder:v0.3
+      - image: hasura/graphql-engine-cli-builder:v0.4
     working_directory: ~/graphql-engine
     steps:
       - attach_workspace:
@@ -159,7 +159,7 @@ jobs:
   # build the server binary, and package into docker image
   build_server:
     docker:
-      - image: hasura/graphql-engine-server-builder:20190811
+      - image: hasura/graphql-engine-server-builder:20190826
     working_directory: ~/graphql-engine
     steps:
       - attach_workspace:
@@ -235,7 +235,7 @@ jobs:
     environment:
       PG_VERSION: "11_1"
     docker:
-      - image: hasura/graphql-engine-server-builder:20190811
+      - image: hasura/graphql-engine-server-builder:20190826
      # TODO: change this to circleci postgis when they have one for pg 11
       - image: mdillon/postgis:11-alpine
         <<: *test_pg_env
@@ -245,7 +245,7 @@ jobs:
     environment:
       PG_VERSION: "10_6"
     docker:
-      - image: hasura/graphql-engine-server-builder:20190811
+      - image: hasura/graphql-engine-server-builder:20190826
       - image: circleci/postgres:10.6-alpine-postgis
         <<: *test_pg_env

@@ -254,7 +254,7 @@ jobs:
     environment:
       PG_VERSION: "9_6"
     docker:
-      - image: hasura/graphql-engine-server-builder:20190811
+      - image: hasura/graphql-engine-server-builder:20190826
       - image: circleci/postgres:9.6-alpine-postgis
         <<: *test_pg_env

@@ -263,13 +263,13 @@ jobs:
     environment:
       PG_VERSION: "9_5"
     docker:
-      - image: hasura/graphql-engine-server-builder:20190811
+      - image: hasura/graphql-engine-server-builder:20190826
       - image: circleci/postgres:9.5-alpine-postgis
         <<: *test_pg_env

   test_cli_with_last_release:
     docker:
-      - image: hasura/graphql-engine-cli-builder:v0.3
+      - image: hasura/graphql-engine-cli-builder:v0.4
       - image: circleci/postgres:10-alpine
         environment:
           POSTGRES_USER: gql_test
@@ -302,7 +302,7 @@ jobs:
   # test and build cli
   test_and_build_cli:
     docker:
-      - image: hasura/graphql-engine-cli-builder:v0.3
+      - image: hasura/graphql-engine-cli-builder:v0.4
       - image: circleci/postgres:10-alpine
         environment:
           POSTGRES_USER: gql_test
@@ -398,7 +398,7 @@ jobs:
   # test console
   test_console:
     docker:
-      - image: hasura/graphql-engine-console-builder:v0.3
+      - image: hasura/graphql-engine-console-builder:v0.4
         environment:
           CYPRESS_KEY: 983be0db-0f19-40cc-bfc4-194fcacd85e1
           GHCRTS: -N1
@@ -435,7 +435,7 @@ jobs:
   # test server upgrade from last version to current build
   test_server_upgrade:
     docker:
-      - image: hasura/graphql-engine-upgrade-tester:v0.4
+      - image: hasura/graphql-engine-upgrade-tester:v0.5
         environment:
           HASURA_GRAPHQL_DATABASE_URL: postgres://gql_test:@localhost:5432/gql_test
       - image: circleci/postgres:10-alpine
@@ -16,9 +16,14 @@ RUN apt-get update && apt-get install -y \
     libxss1 \
     libasound2 \
     xvfb \
+    git \
+    cmake \
+    pkgconf \
     && curl -Lo /tmp/gcloud-${gcloud_version}.tar.gz https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${gcloud_version}-linux-x86_64.tar.gz \
     && tar -xzf /tmp/gcloud-${gcloud_version}.tar.gz -C /usr/local \
     && /usr/local/google-cloud-sdk/install.sh \
+    && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \
+    && make && make test && make install && ldconfig \
     && apt-get -y auto-remove \
     && apt-get -y clean \
     && rm -rf /var/lib/apt/lists/* \
@@ -4,12 +4,12 @@ FROM debian:stretch-20190228-slim

 ARG docker_ver="17.09.0-ce"
 ARG resolver="lts-13.20"
-ARG stack_ver="1.9.3"
+ARG stack_ver="2.1.3"
 ARG postgres_ver="11"

 # Install GNU make, curl, git and docker client. Required to build the server
 RUN apt-get -y update \
-    && apt-get -y install curl gnupg2 \
+    && apt-get -y install curl gnupg2 cmake pkgconf \
     && echo "deb http://apt.postgresql.org/pub/repos/apt/ stretch-pgdg main" > /etc/apt/sources.list.d/pgdg.list \
     && curl -s https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \
     && apt-get -y update \
@@ -17,6 +17,8 @@ RUN apt-get -y update \
     && curl -Lo /tmp/docker-${docker_ver}.tgz https://download.docker.com/linux/static/stable/x86_64/docker-${docker_ver}.tgz \
     && tar -xz -C /tmp -f /tmp/docker-${docker_ver}.tgz \
     && mv /tmp/docker/* /usr/bin \
+    && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \
+    && make && make test && make install && ldconfig \
     && curl -sL https://github.com/commercialhaskell/stack/releases/download/v${stack_ver}/stack-${stack_ver}-linux-x86_64.tar.gz \
     | tar xz --wildcards --strip-components=1 -C /usr/local/bin '*/stack' \
     && stack --resolver ${resolver} setup \
@@ -3,4 +3,9 @@ FROM node:11-slim
 RUN apt-get update && apt-get install -y \
     libpq5 \
     netcat \
-    && curl -L https://github.com/hasura/graphql-engine/raw/master/cli/get.sh | INSTALL_PATH=/bin bash
+    git \
+    cmake \
+    pkgconf \
+    && curl -L https://github.com/hasura/graphql-engine/raw/master/cli/get.sh | INSTALL_PATH=/bin bash \
+    && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \
+    && make && make test && make install && ldconfig \
docs/graphql/manual/deployment/compression.rst (new file)
@@ -0,0 +1,16 @@
HTTP Compression
================

.. contents:: Table of contents
  :backlinks: none
  :depth: 1
  :local:

The Hasura GraphQL Engine supports HTTP compression.
The server looks for the ``Accept-Encoding`` header in the request.
If the header contains ``br``, the server uses `Brotli <https://en.wikipedia.org/wiki/Brotli>`__ compression; otherwise, if the header contains
``gzip``, the server uses `Gzip <https://en.wikipedia.org/wiki/Gzip>`__ compression.
If both values are present, the server prefers ``Brotli`` over ``Gzip``.
The server also sets the ``Content-Encoding`` response header to ``br`` for ``Brotli`` compression or ``gzip`` for ``Gzip`` compression.

**Only responses from "/v1/query" and "/v1/graphql" endpoints are compressed.**
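The negotiation rule described above is compact enough to state directly. A minimal Haskell sketch of just the preference logic (illustrative names only — the real implementation is the Hasura.Server.Compression module added later in this commit):

    {-# LANGUAGE OverloadedStrings #-}
    import Data.Text (Text)

    -- Pick a response encoding from the parsed Accept-Encoding values,
    -- preferring Brotli over Gzip, as the docs above describe.
    pickEncoding :: [Text] -> Maybe Text
    pickEncoding accepted
      | "br"   `elem` accepted = Just "br"
      | "gzip" `elem` accepted = Just "gzip"
      | otherwise              = Nothing  -- respond uncompressed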
@@ -22,6 +22,7 @@ The following are a few configuration use cases:
 - :ref:`cli-with-admin-secret`
 - :ref:`configure-cors`
 - :ref:`console-assets-on-server`
+- :ref:`http-compression`

 .. toctree::
   :hidden:
@@ -72,7 +72,7 @@ For the ``serve`` sub-command these are the available flags and ENV variables:

   * - ``--enable-console <true|false>``
     - ``HASURA_GRAPHQL_ENABLE_CONSOLE``
-    - Enable the Hasura Console (served by the server on ``/`` and ``/console``)
+    - Enable the Hasura Console (served by the server on ``/`` and ``/console``) (default: false)

   * - ``--admin-secret <ADMIN_SECRET_KEY>``
     - ``HASURA_GRAPHQL_ADMIN_SECRET``
@@ -173,13 +173,13 @@ For the ``serve`` sub-command these are the available flags and ENV variables:

   * - ``--live-queries-multiplexed-batch-size``
     - ``HASURA_GRAPHQL_LIVE_QUERIES_MULTIPLEXED_BATCH_SIZE``
-    - Multiplexed live queries are split into batches of the specified size. Default: 100
+    - Multiplexed live queries are split into batches of the specified size. Default: 100

   * - ``--enable-allowlist``
     - ``HASURA_GRAPHQL_ENABLE_ALLOWLIST``
     - Restrict queries allowed to be executed by the GraphQL engine to those that are part of the configured
       allow-list. Default: ``false`` *(Available for versions > v1.0.0-beta.1)*


   * - ``--console-assets-dir``
     - ``HASURA_GRAPHQL_CONSOLE_ASSETS_DIR``
     - Set the value to ``/srv/console-assets`` for the console to load assets from the server itself
@@ -56,3 +56,4 @@ For access to Hasura GraphQL engine logs, check the below page for details:
   allow-list
   postgres-permissions
   Updating GraphQL engine <updating>
+  HTTP Compression <compression>
@@ -8,9 +8,17 @@ own machine and how to contribute.
 - [stack](https://docs.haskellstack.org/en/stable/README/#how-to-install)
 - [Node.js](https://nodejs.org/en/) (>= v8.9)
 - npm >= 5.7
+- brotli
 - libpq-dev
 - python >= 3.5 with pip3

+Brotli can be installed from source on Debian using `git`, `cmake` and `pkgconf`:
+
+    $ apt-get -y update \
+        && apt-get -y install git cmake pkgconf \
+        && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \
+        && make && make test && make install && ldconfig
+
 The last two prerequisites can be installed on Debian with:

     $ sudo apt install libpq-dev python3 python3-pip python3-venv
@@ -146,6 +146,10 @@ library
                      , filepath >= 1.4
                      , mime-types >= 0.1

+                     -- HTTP compression
+                     , zlib
+                     , brotli
+
 exposed-modules: Hasura.Prelude
                , Hasura.Logging
                , Hasura.EncJSON
@@ -168,6 +172,7 @@ library
                , Hasura.Server.SchemaUpdate
                , Hasura.Server.PGDump
                , Hasura.Server.Config
+               , Hasura.Server.Compression

                , Hasura.RQL.Types
                , Hasura.RQL.Instances
server/packaging/stack-build.df (new file)
@@ -0,0 +1,7 @@
FROM fpco/stack-build:lts-13.20

# Install brotli dependencies
RUN apt-get -y update \
    && apt-get -y install cmake pkgconf \
    && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \
    && make && make test && make install && ldconfig
@@ -30,9 +30,9 @@ import Hasura.Prelude
 import Hasura.RQL.DDL.Metadata (fetchMetadata)
 import Hasura.RQL.Types        (SQLGenCtx (..), SchemaCache (..),
                                 adminUserInfo, emptySchemaCache)
-import Hasura.Server.App       (HasuraApp(..), SchemaCacheRef (..),
-                                getSCFromRef, logInconsObjs,
-                                mkWaiApp)
+import Hasura.Server.App       (HasuraApp (..),
+                                SchemaCacheRef (..), getSCFromRef,
+                                logInconsObjs, mkWaiApp)
 import Hasura.Server.Auth
 import Hasura.Server.CheckUpdates (checkForUpdates)
 import Hasura.Server.Init
@@ -49,6 +49,7 @@ import Hasura.RQL.DDL.Schema
 import Hasura.RQL.Types
 import Hasura.Server.Auth (AuthMode (..),
                            getUserInfo)
+import Hasura.Server.Compression
 import Hasura.Server.Config (runGetConfig)
 import Hasura.Server.Context
 import Hasura.Server.Cors
@@ -137,11 +138,6 @@ data APIResp
   = JSONResp !(HttpResponse EncJSON)
   | RawResp !(HttpResponse BL.ByteString)

-apiRespToLBS :: APIResp -> BL.ByteString
-apiRespToLBS = \case
-  JSONResp (HttpResponse j _) -> encJToLBS j
-  RawResp (HttpResponse b _) -> b
-
 data APIHandler a
   = AHGet !(Handler APIResp)
   | AHPost !(a -> Handler APIResp)
@@ -191,22 +187,6 @@ buildQCtx = do
   sqlGenCtx <- scSQLGenCtx . hcServerCtx <$> ask
   return $ QCtx userInfo cache sqlGenCtx

-logResult
-  :: (MonadIO m)
-  => L.Logger
-  -> Maybe UserInfo
-  -> RequestId
-  -> Wai.Request
-  -> Maybe Value
-  -> Either QErr BL.ByteString
-  -> Maybe (UTCTime, UTCTime)
-  -> m ()
-logResult logger userInfoM reqId httpReq req res qTime = do
-  let logline = case res of
-        Right res' -> mkHttpAccessLog userInfoM reqId httpReq res' qTime
-        Left e -> mkHttpErrorLog userInfoM reqId httpReq e req qTime
-  liftIO $ L.unLogger logger logline
-
 logSuccess
   :: (MonadIO m)
   => L.Logger
@@ -215,9 +195,11 @@ logSuccess
   -> Wai.Request
   -> BL.ByteString
   -> Maybe (UTCTime, UTCTime)
+  -> Maybe CompressionType
   -> m ()
-logSuccess logger userInfoM reqId httpReq res qTime =
-  liftIO $ L.unLogger logger $ mkHttpAccessLog userInfoM reqId httpReq res qTime
+logSuccess logger userInfoM reqId httpReq res qTime cType =
+  liftIO $ L.unLogger logger $
+    mkHttpAccessLog userInfoM reqId httpReq res qTime cType

 logError
   :: (MonadIO m)
@@ -228,7 +210,8 @@ logError
   -> Maybe Value
   -> QErr -> m ()
 logError logger userInfoM reqId httpReq req qErr =
-  liftIO $ L.unLogger logger $ mkHttpErrorLog userInfoM reqId httpReq qErr req Nothing
+  liftIO $ L.unLogger logger $
+    mkHttpErrorLog userInfoM reqId httpReq qErr req Nothing Nothing

 mkSpockAction
   :: (MonadIO m, FromJSON a, ToJSON a)
@@ -287,18 +270,25 @@ mkSpockAction qErrEncoder qErrModifier serverCtx apiHandler = do
       setStatus $ qeStatus qErr
       json $ qErrEncoder includeInternal qErr

-    logSuccessAndResp userInfo reqId req result qTime = do
-      logSuccess logger userInfo reqId req (apiRespToLBS result) qTime
+    logSuccessAndResp userInfo reqId req result qTime =
       case result of
-        JSONResp (HttpResponse j h) -> do
-          uncurry setHeader jsonHeader
-          uncurry setHeader (requestIdHeader, unRequestId reqId)
-          mapM_ (mapM_ (uncurry setHeader . unHeader)) h
-          lazyBytes $ encJToLBS j
-        RawResp (HttpResponse b h) -> do
-          uncurry setHeader (requestIdHeader, unRequestId reqId)
-          mapM_ (mapM_ (uncurry setHeader . unHeader)) h
-          lazyBytes b
+        JSONResp (HttpResponse encJson h) ->
+          possiblyCompressedLazyBytes userInfo reqId req qTime (encJToLBS encJson) $
+            pure jsonHeader <> mkHeaders h
+        RawResp (HttpResponse rawBytes h) ->
+          possiblyCompressedLazyBytes userInfo reqId req qTime rawBytes $ mkHeaders h
+
+    possiblyCompressedLazyBytes userInfo reqId req qTime respBytes respHeaders = do
+      let (compressedResp, mEncodingHeader, mCompressionType) =
+            compressResponse (requestHeaders req) respBytes
+          encodingHeader = maybe [] pure mEncodingHeader
+          reqIdHeader = (requestIdHeader, unRequestId reqId)
+          allRespHeaders = pure reqIdHeader <> encodingHeader <> respHeaders
+      logSuccess logger userInfo reqId req compressedResp qTime mCompressionType
+      mapM_ (uncurry setHeader) allRespHeaders
+      lazyBytes compressedResp
+
+    mkHeaders = maybe [] (map unHeader)

 v1QueryHandler :: RQLQuery -> Handler (HttpResponse EncJSON)
 v1QueryHandler query = do
@@ -460,8 +450,9 @@ mkWaiApp
   -> S.HashSet API
   -> EL.LiveQueriesOptions
   -> IO HasuraApp
-mkWaiApp isoLevel loggerCtx sqlGenCtx enableAL pool ci httpManager mode corsCfg
-  enableConsole consoleAssetsDir enableTelemetry instanceId apis lqOpts = do
+mkWaiApp isoLevel loggerCtx sqlGenCtx enableAL pool ci httpManager mode
+  corsCfg enableConsole consoleAssetsDir enableTelemetry
+  instanceId apis lqOpts = do

   let pgExecCtx = PGExecCtx pool isoLevel
       pgExecCtxSer = PGExecCtx pool Q.Serializable
server/src-lib/Hasura/Server/Compression.hs (new file)
@@ -0,0 +1,47 @@
module Hasura.Server.Compression
  ( compressResponse
  , CompressionType(..)
  , compressionTypeToTxt
  )
where

import Hasura.Prelude

import Hasura.Server.Utils (brHeader, gzipHeader)

import qualified Codec.Compression.Brotli as BR
import qualified Codec.Compression.GZip as GZ
import qualified Data.ByteString.Lazy as BL
import qualified Data.Text as T
import qualified Network.HTTP.Types.Header as NH

data CompressionType
  = CTGZip
  | CTBrotli
  deriving (Show, Eq)

compressionTypeToTxt :: CompressionType -> T.Text
compressionTypeToTxt CTGZip = "gzip"
compressionTypeToTxt CTBrotli = "brotli"

compressResponse
  :: NH.RequestHeaders
  -> BL.ByteString
  -> (BL.ByteString, Maybe (Text, Text), Maybe CompressionType)
compressResponse reqHeaders unCompressedResp =
  let compressionTypeM = getRequestedCompression reqHeaders
      appendCompressionType (res, headerM) = (res, headerM, compressionTypeM)
  in appendCompressionType $ case compressionTypeM of
       Just CTBrotli -> (BR.compress unCompressedResp, Just brHeader)
       Just CTGZip   -> (GZ.compress unCompressedResp, Just gzipHeader)
       Nothing       -> (unCompressedResp, Nothing)

getRequestedCompression :: NH.RequestHeaders -> Maybe CompressionType
getRequestedCompression reqHeaders
  | "br" `elem` acceptEncodingVals   = Just CTBrotli
  | "gzip" `elem` acceptEncodingVals = Just CTGZip
  | otherwise                        = Nothing
  where
    acceptEncodingVals = concatMap (splitHeaderVal . snd) $
                         filter (\h -> fst h == NH.hAcceptEncoding) reqHeaders
    splitHeaderVal bs = map T.strip $ T.splitOn "," $ bsToTxt bs
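A quick sanity check of the module above, as one might run it from GHCi (an illustrative session; assumes the module builds as shown and OverloadedStrings is enabled for the header literals). The compressed body itself is elided; the returned header values come from brHeader/gzipHeader in Hasura.Server.Utils:

    λ> let (_, hdr, ty) = compressResponse [("Accept-Encoding", "gzip, br")] "{\"data\":{}}"
    λ> (hdr, ty)
    (Just ("Content-Encoding","br"),Just CTBrotli)
    λ> compressResponse [] "{}"  -- no Accept-Encoding: body passes through
    ("{}",Nothing,Nothing)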
@@ -433,6 +433,9 @@ serveCmdFooter =
       , [ "# Start GraphQL Engine with telemetry enabled/disabled"
         , "graphql-engine --database-url <database-url> serve --enable-telemetry true|false"
         ]
+      , [ "# Start GraphQL Engine with HTTP compression enabled for '/v1/query' and '/v1/graphql' endpoints"
+        , "graphql-engine --database-url <database-url> serve --enable-compression"
+        ]
       ]

     envVarDoc = mkEnvVarDoc $ envVars <> eventEnvs
@@ -549,7 +552,7 @@ corsDomainEnv =
 enableConsoleEnv :: (String, String)
 enableConsoleEnv =
   ( "HASURA_GRAPHQL_ENABLE_CONSOLE"
-  , "Enable API Console"
+  , "Enable API Console (default: false)"
   )

 enableTelemetryEnv :: (String, String)
@@ -14,29 +14,30 @@ module Hasura.Server.Logging
 import Data.Aeson
 import Data.Aeson.Casing
 import Data.Aeson.TH
-import Data.Bits (shift, (.&.))
-import Data.ByteString.Char8 (ByteString)
-import Data.Int (Int64)
-import Data.List (find)
+import Data.Bits               (shift, (.&.))
+import Data.ByteString.Char8   (ByteString)
+import Data.Int                (Int64)
+import Data.List               (find)
 import Data.Time.Clock
-import Data.Word (Word32)
-import Network.Socket (SockAddr (..))
-import System.ByteOrder (ByteOrder (..), byteOrder)
-import Text.Printf (printf)
+import Data.Word               (Word32)
+import Network.Socket          (SockAddr (..))
+import System.ByteOrder        (ByteOrder (..), byteOrder)
+import Text.Printf             (printf)

-import qualified Data.ByteString.Char8 as BS
-import qualified Data.ByteString.Lazy as BL
-import qualified Data.Text as T
-import qualified Network.HTTP.Types as N
-import qualified Network.Wai as Wai
+import qualified Data.ByteString.Char8 as BS
+import qualified Data.ByteString.Lazy  as BL
+import qualified Data.Text             as T
+import qualified Network.HTTP.Types    as N
+import qualified Network.Wai           as Wai

 import Hasura.HTTP
-import Hasura.Logging (EngineLogType (..))
+import Hasura.Logging          (EngineLogType (..))
 import Hasura.Prelude
 import Hasura.RQL.Types
+import Hasura.Server.Compression
 import Hasura.Server.Utils

-import qualified Hasura.Logging as L
+import qualified Hasura.Logging        as L

 data StartupLog
   = StartupLog
@@ -124,15 +125,17 @@ data HttpInfoLog
   , hlSource :: !T.Text
   , hlPath :: !T.Text
   , hlHttpVersion :: !N.HttpVersion
+  , hlCompression :: !(Maybe CompressionType)
   } deriving (Show, Eq)

 instance ToJSON HttpInfoLog where
-  toJSON (HttpInfoLog st met src path hv) =
+  toJSON (HttpInfoLog st met src path hv compressTypeM) =
     object [ "status" .= N.statusCode st
            , "method" .= met
            , "ip" .= src
            , "url" .= path
            , "http_version" .= show hv
+           , "content_encoding" .= (compressionTypeToTxt <$> compressTypeM)
            ]

 -- | Information about a GraphQL/Hasura metadata operation over HTTP
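With the new hlCompression field, every http_info log object records the negotiated encoding. A rough sketch of what the instance above emits (hypothetical values, written against this module's N alias for Network.HTTP.Types, assuming OverloadedStrings; note the log carries "brotli" via compressionTypeToTxt, while the HTTP header itself says "br"):

    sample :: HttpInfoLog
    sample = HttpInfoLog (N.mkStatus 200 "OK") "POST" "127.0.0.1"
                         "/v1/graphql" N.http11 (Just CTBrotli)

    -- encode sample produces (reformatted for readability):
    --   { "status": 200, "method": "POST", "ip": "127.0.0.1"
    --   , "url": "/v1/graphql", "http_version": "HTTP/1.1"
    --   , "content_encoding": "brotli" }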
@@ -170,14 +173,16 @@ mkHttpAccessLog
   -> Wai.Request
   -> BL.ByteString
   -> Maybe (UTCTime, UTCTime)
+  -> Maybe CompressionType
   -> HttpLog
-mkHttpAccessLog userInfoM reqId req res mTimeT =
+mkHttpAccessLog userInfoM reqId req res mTimeT compressTypeM =
   let http = HttpInfoLog
              { hlStatus = status
              , hlMethod = bsToTxt $ Wai.requestMethod req
              , hlSource = bsToTxt $ getSourceFromFallback req
              , hlPath = bsToTxt $ Wai.rawPathInfo req
              , hlHttpVersion = Wai.httpVersion req
+             , hlCompression = compressTypeM
              }
       op = OperationLog
            { olRequestId = reqId
@@ -200,14 +205,16 @@ mkHttpErrorLog
   -> QErr
   -> Maybe Value
   -> Maybe (UTCTime, UTCTime)
+  -> Maybe CompressionType
   -> HttpLog
-mkHttpErrorLog userInfoM reqId req err query mTimeT =
+mkHttpErrorLog userInfoM reqId req err query mTimeT compressTypeM =
   let http = HttpInfoLog
              { hlStatus = status
              , hlMethod = bsToTxt $ Wai.requestMethod req
              , hlSource = bsToTxt $ getSourceFromFallback req
              , hlPath = bsToTxt $ Wai.rawPathInfo req
              , hlHttpVersion = Wai.httpVersion req
+             , hlCompression = compressTypeM
              }
       op = OperationLog
            { olRequestId = reqId
@@ -43,6 +43,9 @@ htmlHeader = ("Content-Type", "text/html; charset=utf-8")
 gzipHeader :: (T.Text, T.Text)
 gzipHeader = ("Content-Encoding", "gzip")

+brHeader :: (T.Text, T.Text)
+brHeader = ("Content-Encoding", "br")
+
 userRoleHeader :: T.Text
 userRoleHeader = "x-hasura-role"

@@ -34,6 +34,8 @@ extra-deps:
 - Spock-core-0.13.0.0
 - monad-validate-1.2.0.0

+- brotli-0.0.0.0
+
 # Override default flag values for local packages and extra-deps
 flags: {}

@@ -57,3 +59,7 @@ extra-package-dbs: []

 # Allow a newer minor version of GHC than the snapshot specifies
 # compiler-check: newer-minor
+
+docker:
+  enable: false
+  repo: 'hasura/stack-build'
@@ -102,6 +102,13 @@ packages:
     sha256: 8e049bd12ce2bd470909578f2ee8eb80b89d5ff88860afa30e29dd4eafecfa3e
   original:
     hackage: monad-validate-1.2.0.0
+- completed:
+    hackage: brotli-0.0.0.0@sha256:0a8232f028dbc6a1f9db291ef996a5abe74aa00c7c3dc00a741c41f3da75a4dc,2873
+    pantry-tree:
+      size: 407
+      sha256: f4c2e742f10ca010554aeb0037294f118be4f35228acca98c0df97e1093bca33
+  original:
+    hackage: brotli-0.0.0.0
 snapshots:
 - completed:
     size: 498167
server/tests-py/queries/compression/graphql_query.yaml (new file)
@@ -0,0 +1,18 @@
url: /v1/graphql
query:
  query: |
    query {
      test {
        id
        name
      }
    }
response:
  data:
    test:
    - id: 1
      name: Gzip
    - id: 2
      name: Brotli
    - id: 3
      name: Nothing
server/tests-py/queries/compression/setup.yaml (new file)
@@ -0,0 +1,11 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      CREATE TABLE test (id serial primary key, name text);
      INSERT INTO test (name) values ('Gzip'), ('Brotli'), ('Nothing');
- type: track_table
  args:
    schema: public
    name: test
server/tests-py/queries/compression/teardown.yaml (new file)
@@ -0,0 +1,7 @@
type: bulk
args:
- type: run_sql
  args:
    sql: |
      DROP TABLE test;
    cascade: true
server/tests-py/queries/compression/v1_query.yaml (new file)
@@ -0,0 +1,15 @@
url: /v1/query
query:
  type: select
  args:
    table: test
    columns:
    - id
    - name
response:
- id: 1
  name: Gzip
- id: 2
  name: Brotli
- id: 3
  name: Nothing
@@ -9,3 +9,4 @@ pyjwt >= 1.5.3
 jsondiff
 cryptography
 graphene
+brotlipy
@@ -33,3 +33,4 @@ urllib3==1.25.3
 wcwidth==0.1.7
 websocket-client==0.56.0
 zipp==0.5.1
+brotlipy==0.7.0
server/tests-py/test_compression.py (new file)
@@ -0,0 +1,90 @@
#!/usr/bin/env python3

import pytest
import yaml
import jsondiff

from super_classes import DefaultTestSelectQueries
from validate import json_ordered


class TestCompression(DefaultTestSelectQueries):

    gzip_header = {'Accept-Encoding': 'gzip'}
    brotli_header = {'Accept-Encoding': 'br'}
    gzip_brotli_header = {'Accept-Encoding': 'gzip, br'}

    def _make_post(self, hge_ctx, u, q, h):
        if hge_ctx.hge_key is not None:
            h['X-Hasura-Admin-Secret'] = hge_ctx.hge_key
        resp = hge_ctx.http.post(
            hge_ctx.hge_url + u,
            json=q,
            headers=h
        )
        return resp

    def _get_config(self, f):
        with open(f) as c:
            conf = yaml.safe_load(c)
            return conf['url'], conf['query'], conf['response']

    def _assert_status_code_200(self, resp):
        assert resp.status_code == 200, resp.json()

    def _assert_encoding(self, headers, encoding):
        assert 'Content-Encoding' in headers, headers
        assert headers['Content-Encoding'] == encoding, headers

    def _assert_resp(self, resp, exp_resp):
        json_resp = resp.json()
        assert json_ordered(json_resp) == json_ordered(exp_resp), yaml.dump({
            'response': json_resp,
            'expected': exp_resp,
            'diff': jsondiff.diff(exp_resp, json_resp)
        })

    def _assert_gzip(self, resp, exp_resp):
        self._assert_status_code_200(resp)
        self._assert_encoding(resp.headers, 'gzip')
        self._assert_resp(resp, exp_resp)

    def _assert_brotli(self, resp, exp_resp):
        self._assert_status_code_200(resp)
        self._assert_encoding(resp.headers, 'br')
        self._assert_resp(resp, exp_resp)

    def test_gzip_compression_graphql(self, hge_ctx):
        url, q, exp_resp = self._get_config(self.dir() + '/graphql_query.yaml')
        resp = self._make_post(hge_ctx, url, q, self.gzip_header)
        self._assert_gzip(resp, exp_resp)

    def test_gzip_compression_v1_query(self, hge_ctx):
        url, q, exp_resp = self._get_config(self.dir() + '/v1_query.yaml')
        resp = self._make_post(hge_ctx, url, q, self.gzip_header)
        self._assert_gzip(resp, exp_resp)

    def test_brotli_compression_graphql(self, hge_ctx):
        url, q, exp_resp = self._get_config(self.dir() + '/graphql_query.yaml')
        resp = self._make_post(hge_ctx, url, q, self.brotli_header)
        self._assert_brotli(resp, exp_resp)

    def test_brotli_compression_v1_query(self, hge_ctx):
        url, q, exp_resp = self._get_config(self.dir() + '/v1_query.yaml')
        resp = self._make_post(hge_ctx, url, q, self.brotli_header)
        self._assert_brotli(resp, exp_resp)

    # If gzip and brotli encoding are requested the server prefers brotli
    def test_gzip_brotli_graphql_query(self, hge_ctx):
        url, q, exp_resp = self._get_config(self.dir() + '/graphql_query.yaml')
        resp = self._make_post(hge_ctx, url, q, self.gzip_brotli_header)
        self._assert_brotli(resp, exp_resp)

    def test_gzip_brotli_v1_query(self, hge_ctx):
        url, q, exp_resp = self._get_config(self.dir() + '/v1_query.yaml')
        resp = self._make_post(hge_ctx, url, q, self.gzip_brotli_header)
        self._assert_brotli(resp, exp_resp)

    @classmethod
    def dir(cls):
        return 'queries/compression'