From 8a0615ff70fa110eef3c8b9f611358a7fadf3261 Mon Sep 17 00:00:00 2001 From: Rakesh Emmadi <12475069+rakeshkky@users.noreply.github.com> Date: Thu, 19 Sep 2019 18:24:40 +0530 Subject: [PATCH] add gzip brotli compression to http responses (close #2674) (#2751) --- .circleci/cli-builder.dockerfile | 6 +- .circleci/config.yml | 20 ++--- .circleci/console-builder.dockerfile | 5 ++ .circleci/server-builder.dockerfile | 6 +- .circleci/server-upgrade/Dockerfile | 7 +- .../graphql/manual/deployment/compression.rst | 16 ++++ .../deployment/graphql-engine-flags/index.rst | 1 + .../graphql-engine-flags/reference.rst | 6 +- docs/graphql/manual/deployment/index.rst | 1 + server/CONTRIBUTING.md | 8 ++ server/graphql-engine.cabal | 5 ++ server/packaging/stack-build.df | 7 ++ server/src-exec/Main.hs | 6 +- server/src-lib/Hasura/Server/App.hs | 65 ++++++-------- server/src-lib/Hasura/Server/Compression.hs | 47 ++++++++++ server/src-lib/Hasura/Server/Init.hs | 5 +- server/src-lib/Hasura/Server/Logging.hs | 43 +++++---- server/src-lib/Hasura/Server/Utils.hs | 3 + server/stack.yaml | 6 ++ server/stack.yaml.lock | 7 ++ .../queries/compression/graphql_query.yaml | 18 ++++ .../tests-py/queries/compression/setup.yaml | 11 +++ .../queries/compression/teardown.yaml | 7 ++ .../queries/compression/v1_query.yaml | 15 ++++ server/tests-py/requirements-top-level.txt | 1 + server/tests-py/requirements.txt | 1 + server/tests-py/test_compression.py | 90 +++++++++++++++++++ 27 files changed, 336 insertions(+), 77 deletions(-) create mode 100644 docs/graphql/manual/deployment/compression.rst create mode 100644 server/packaging/stack-build.df create mode 100644 server/src-lib/Hasura/Server/Compression.hs create mode 100644 server/tests-py/queries/compression/graphql_query.yaml create mode 100644 server/tests-py/queries/compression/setup.yaml create mode 100644 server/tests-py/queries/compression/teardown.yaml create mode 100644 server/tests-py/queries/compression/v1_query.yaml create mode 100644 server/tests-py/test_compression.py diff --git a/.circleci/cli-builder.dockerfile b/.circleci/cli-builder.dockerfile index fd7e89055bf..69272b02e84 100644 --- a/.circleci/cli-builder.dockerfile +++ b/.circleci/cli-builder.dockerfile @@ -8,12 +8,14 @@ RUN go get github.com/golang/dep/cmd/dep \ && go get github.com/hasura/go-bindata/go-bindata \ && go get github.com/tcnksm/ghr -# install UPX and netcat +# install UPX, netcat and brotli RUN apt-get update && apt-get install -y \ - xz-utils netcat libpq5 postgresql-client \ + xz-utils netcat libpq5 postgresql-client git cmake pkgconf \ && curl -Lo /tmp/upx-${upx_version}.tar.xz https://github.com/upx/upx/releases/download/v${upx_version}/upx-${upx_version}-amd64_linux.tar.xz \ && xz -d -c /tmp/upx-${upx_version}.tar.xz \ | tar -xOf - upx-${upx_version}-amd64_linux/upx > /bin/upx \ && chmod a+x /bin/upx \ + && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \ + && make && make test && make install && ldconfig \ && apt-get -y auto-remove \ && rm -rf /var/lib/apt/lists/* diff --git a/.circleci/config.yml b/.circleci/config.yml index 8022ca007d8..dacb1ad8dae 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -142,7 +142,7 @@ jobs: # changes only contains files in .ciignore check_build_worthiness: docker: - - image: hasura/graphql-engine-cli-builder:v0.3 + - image: hasura/graphql-engine-cli-builder:v0.4 working_directory: ~/graphql-engine steps: - attach_workspace: @@ -159,7 +159,7 @@ jobs: # build the server binary, and 
package into docker image build_server: docker: - - image: hasura/graphql-engine-server-builder:20190811 + - image: hasura/graphql-engine-server-builder:20190826 working_directory: ~/graphql-engine steps: - attach_workspace: @@ -235,7 +235,7 @@ jobs: environment: PG_VERSION: "11_1" docker: - - image: hasura/graphql-engine-server-builder:20190811 + - image: hasura/graphql-engine-server-builder:20190826 # TODO: change this to circleci postgis when they have one for pg 11 - image: mdillon/postgis:11-alpine <<: *test_pg_env @@ -245,7 +245,7 @@ jobs: environment: PG_VERSION: "10_6" docker: - - image: hasura/graphql-engine-server-builder:20190811 + - image: hasura/graphql-engine-server-builder:20190826 - image: circleci/postgres:10.6-alpine-postgis <<: *test_pg_env @@ -254,7 +254,7 @@ jobs: environment: PG_VERSION: "9_6" docker: - - image: hasura/graphql-engine-server-builder:20190811 + - image: hasura/graphql-engine-server-builder:20190826 - image: circleci/postgres:9.6-alpine-postgis <<: *test_pg_env @@ -263,13 +263,13 @@ jobs: environment: PG_VERSION: "9_5" docker: - - image: hasura/graphql-engine-server-builder:20190811 + - image: hasura/graphql-engine-server-builder:20190826 - image: circleci/postgres:9.5-alpine-postgis <<: *test_pg_env test_cli_with_last_release: docker: - - image: hasura/graphql-engine-cli-builder:v0.3 + - image: hasura/graphql-engine-cli-builder:v0.4 - image: circleci/postgres:10-alpine environment: POSTGRES_USER: gql_test @@ -302,7 +302,7 @@ jobs: # test and build cli test_and_build_cli: docker: - - image: hasura/graphql-engine-cli-builder:v0.3 + - image: hasura/graphql-engine-cli-builder:v0.4 - image: circleci/postgres:10-alpine environment: POSTGRES_USER: gql_test @@ -398,7 +398,7 @@ jobs: # test console test_console: docker: - - image: hasura/graphql-engine-console-builder:v0.3 + - image: hasura/graphql-engine-console-builder:v0.4 environment: CYPRESS_KEY: 983be0db-0f19-40cc-bfc4-194fcacd85e1 GHCRTS: -N1 @@ -435,7 +435,7 @@ jobs: # test server upgrade from last version to current build test_server_upgrade: docker: - - image: hasura/graphql-engine-upgrade-tester:v0.4 + - image: hasura/graphql-engine-upgrade-tester:v0.5 environment: HASURA_GRAPHQL_DATABASE_URL: postgres://gql_test:@localhost:5432/gql_test - image: circleci/postgres:10-alpine diff --git a/.circleci/console-builder.dockerfile b/.circleci/console-builder.dockerfile index a06d2b1c5bf..15ad1645e74 100644 --- a/.circleci/console-builder.dockerfile +++ b/.circleci/console-builder.dockerfile @@ -16,9 +16,14 @@ RUN apt-get update && apt-get install -y \ libxss1 \ libasound2 \ xvfb \ + git \ + cmake \ + pkgconf \ && curl -Lo /tmp/gcloud-${gcloud_version}.tar.gz https://dl.google.com/dl/cloudsdk/channels/rapid/downloads/google-cloud-sdk-${gcloud_version}-linux-x86_64.tar.gz \ && tar -xzf /tmp/gcloud-${gcloud_version}.tar.gz -C /usr/local \ && /usr/local/google-cloud-sdk/install.sh \ + && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \ + && make && make test && make install && ldconfig \ && apt-get -y auto-remove \ && apt-get -y clean \ && rm -rf /var/lib/apt/lists/* \ diff --git a/.circleci/server-builder.dockerfile b/.circleci/server-builder.dockerfile index 683f2ababb4..249f963381b 100644 --- a/.circleci/server-builder.dockerfile +++ b/.circleci/server-builder.dockerfile @@ -4,12 +4,12 @@ FROM debian:stretch-20190228-slim ARG docker_ver="17.09.0-ce" ARG resolver="lts-13.20" -ARG stack_ver="1.9.3" +ARG stack_ver="2.1.3" ARG postgres_ver="11" # Install 
GNU make, curl, git and docker client. Required to build the server RUN apt-get -y update \ - && apt-get -y install curl gnupg2 \ + && apt-get -y install curl gnupg2 cmake pkgconf \ && echo "deb http://apt.postgresql.org/pub/repos/apt/ stretch-pgdg main" > /etc/apt/sources.list.d/pgdg.list \ && curl -s https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - \ && apt-get -y update \ @@ -17,6 +17,8 @@ RUN apt-get -y update \ && curl -Lo /tmp/docker-${docker_ver}.tgz https://download.docker.com/linux/static/stable/x86_64/docker-${docker_ver}.tgz \ && tar -xz -C /tmp -f /tmp/docker-${docker_ver}.tgz \ && mv /tmp/docker/* /usr/bin \ + && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \ + && make && make test && make install && ldconfig \ && curl -sL https://github.com/commercialhaskell/stack/releases/download/v${stack_ver}/stack-${stack_ver}-linux-x86_64.tar.gz \ | tar xz --wildcards --strip-components=1 -C /usr/local/bin '*/stack' \ && stack --resolver ${resolver} setup \ diff --git a/.circleci/server-upgrade/Dockerfile b/.circleci/server-upgrade/Dockerfile index 5886ba65ee8..e81f184ea0d 100644 --- a/.circleci/server-upgrade/Dockerfile +++ b/.circleci/server-upgrade/Dockerfile @@ -3,4 +3,9 @@ FROM node:11-slim RUN apt-get update && apt-get install -y \ libpq5 \ netcat \ - && curl -L https://github.com/hasura/graphql-engine/raw/master/cli/get.sh | INSTALL_PATH=/bin bash + git \ + cmake \ + pkgconf \ + && curl -L https://github.com/hasura/graphql-engine/raw/master/cli/get.sh | INSTALL_PATH=/bin bash \ + && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \ + && make && make test && make install && ldconfig \ diff --git a/docs/graphql/manual/deployment/compression.rst b/docs/graphql/manual/deployment/compression.rst new file mode 100644 index 00000000000..93ef7da5808 --- /dev/null +++ b/docs/graphql/manual/deployment/compression.rst @@ -0,0 +1,16 @@ +HTTP Compression +================ + +.. contents:: Table of contents + :backlinks: none + :depth: 1 + :local: + +The Hasura GraphQL Engine supports HTTP compression. +The server inspects the ``Accept-Encoding`` header of each request. +If the header contains ``br``, the server uses `Brotli `__ compression; otherwise, if the header contains +``gzip``, the server uses `Gzip `__ compression. +If both values are present, the server prefers ``Brotli`` over ``Gzip``. +The server also sets the ``Content-Encoding`` response header to ``br`` for ``Brotli`` compression or ``gzip`` for ``Gzip`` compression. + +**Only responses from "/v1/query" and "/v1/graphql" endpoints are compressed.** diff --git a/docs/graphql/manual/deployment/graphql-engine-flags/index.rst b/docs/graphql/manual/deployment/graphql-engine-flags/index.rst index 804a61c4d48..78f086c65f0 100644 --- a/docs/graphql/manual/deployment/graphql-engine-flags/index.rst +++ b/docs/graphql/manual/deployment/graphql-engine-flags/index.rst @@ -22,6 +22,7 @@ The following are a few configuration use cases: - :ref:`cli-with-admin-secret` - :ref:`configure-cors` - :ref:`console-assets-on-server` +- :ref:`http-compression` .. 
toctree:: :hidden: diff --git a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst index 0f71830fad4..57c30a79bfe 100644 --- a/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst +++ b/docs/graphql/manual/deployment/graphql-engine-flags/reference.rst @@ -72,7 +72,7 @@ For the ``serve`` sub-command these are the available flags and ENV variables: * - ``--enable-console `` - ``HASURA_GRAPHQL_ENABLE_CONSOLE`` - - Enable the Hasura Console (served by the server on ``/`` and ``/console``) + - Enable the Hasura Console (served by the server on ``/`` and ``/console``) (default: false) * - ``--admin-secret `` - ``HASURA_GRAPHQL_ADMIN_SECRET`` @@ -173,13 +173,13 @@ For the ``serve`` sub-command these are the available flags and ENV variables: * - ``--live-queries-multiplexed-batch-size`` - ``HASURA_GRAPHQL_LIVE_QUERIES_MULTIPLEXED_BATCH_SIZE`` - - Multiplexed live queries are split into batches of the specified size. Default: 100 + - Multiplexed live queries are split into batches of the specified size. Default: 100 * - ``--enable-allowlist`` - ``HASURA_GRAPHQL_ENABLE_ALLOWLIST`` - Restrict queries allowed to be executed by the GraphQL engine to those that are part of the configured allow-list. Default: ``false`` *(Available for versions > v1.0.0-beta.1)* - + * - ``--console-assets-dir`` - ``HASURA_GRAPHQL_CONSOLE_ASSETS_DIR`` - Set the value to ``/srv/console-assets`` for the console to load assets from the server itself diff --git a/docs/graphql/manual/deployment/index.rst b/docs/graphql/manual/deployment/index.rst index 41dcb4c053d..08bc857cf69 100644 --- a/docs/graphql/manual/deployment/index.rst +++ b/docs/graphql/manual/deployment/index.rst @@ -56,3 +56,4 @@ For access to Hasura GraphQL engine logs, check the below page for details: allow-list postgres-permissions Updating GraphQL engine + HTTP Compression diff --git a/server/CONTRIBUTING.md b/server/CONTRIBUTING.md index 1296ebde3ea..cab2daeff2d 100644 --- a/server/CONTRIBUTING.md +++ b/server/CONTRIBUTING.md @@ -8,9 +8,17 @@ own machine and how to contribute. 
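The compression support in this patch makes the Haskell `brotli` bindings link against the system Brotli C library (which is why `pkgconf` is installed alongside it everywhere in this patch), so `stack build` will fail at link time if the library is missing. A quick way to confirm it is visible to the linker — a minimal sketch, assuming the google/brotli cmake install used throughout this patch, which ships the `libbrotlicommon`/`libbrotlidec`/`libbrotlienc` libraries and their pkg-config files:

    $ ldconfig -p | grep libbrotli           # expect libbrotlicommon, libbrotlidec, libbrotlienc
    $ pkg-config --modversion libbrotlienc   # requires the .pc files installed by 'make install'

With that in place, the prerequisites in full are: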
- [stack](https://docs.haskellstack.org/en/stable/README/#how-to-install) - [Node.js](https://nodejs.org/en/) (>= v8.9) - npm >= 5.7 +- brotli - libpq-dev - python >= 3.5 with pip3 +Brotli can be installed from source on Debian using `git`, `cmake`, and `pkgconf`: + + $ apt-get -y update \ + && apt-get -y install git cmake pkgconf \ + && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \ + && make && make test && make install && ldconfig + The last two prerequisites can be installed on Debian with: $ sudo apt install libpq-dev python3 python3-pip python3-venv diff --git a/server/graphql-engine.cabal b/server/graphql-engine.cabal index f3d080c0dd6..cca9401b1e1 100644 --- a/server/graphql-engine.cabal +++ b/server/graphql-engine.cabal @@ -146,6 +146,10 @@ library , filepath >= 1.4 , mime-types >= 0.1 + -- HTTP compression + , zlib + , brotli + exposed-modules: Hasura.Prelude , Hasura.Logging , Hasura.EncJSON @@ -168,6 +172,7 @@ library , Hasura.Server.SchemaUpdate , Hasura.Server.PGDump , Hasura.Server.Config + , Hasura.Server.Compression , Hasura.RQL.Types , Hasura.RQL.Instances diff --git a/server/packaging/stack-build.df b/server/packaging/stack-build.df new file mode 100644 index 00000000000..b4376662241 --- /dev/null +++ b/server/packaging/stack-build.df @@ -0,0 +1,7 @@ +FROM fpco/stack-build:lts-13.20 + +# Install brotli dependencies +RUN apt-get -y update \ + && apt-get -y install cmake pkgconf \ + && git clone https://github.com/google/brotli.git && cd brotli && mkdir out && cd out && ../configure-cmake \ + && make && make test && make install && ldconfig diff --git a/server/src-exec/Main.hs b/server/src-exec/Main.hs index fbc10429552..d4f3104ed17 100644 --- a/server/src-exec/Main.hs +++ b/server/src-exec/Main.hs @@ -30,9 +30,9 @@ import Hasura.Prelude import Hasura.RQL.DDL.Metadata (fetchMetadata) import Hasura.RQL.Types (SQLGenCtx (..), SchemaCache (..), adminUserInfo, emptySchemaCache) -import Hasura.Server.App (HasuraApp(..), SchemaCacheRef (..), - getSCFromRef, logInconsObjs, - mkWaiApp) +import Hasura.Server.App (HasuraApp (..), + SchemaCacheRef (..), getSCFromRef, + logInconsObjs, mkWaiApp) import Hasura.Server.Auth import Hasura.Server.CheckUpdates (checkForUpdates) import Hasura.Server.Init diff --git a/server/src-lib/Hasura/Server/App.hs b/server/src-lib/Hasura/Server/App.hs index 03db4997581..f827aa050d4 100644 --- a/server/src-lib/Hasura/Server/App.hs +++ b/server/src-lib/Hasura/Server/App.hs @@ -49,6 +49,7 @@ import Hasura.RQL.DDL.Schema import Hasura.RQL.Types import Hasura.Server.Auth (AuthMode (..), getUserInfo) +import Hasura.Server.Compression import Hasura.Server.Config (runGetConfig) import Hasura.Server.Context import Hasura.Server.Cors @@ -137,11 +138,6 @@ data APIResp = JSONResp !(HttpResponse EncJSON) | RawResp !(HttpResponse BL.ByteString) -apiRespToLBS :: APIResp -> BL.ByteString -apiRespToLBS = \case - JSONResp (HttpResponse j _) -> encJToLBS j - RawResp (HttpResponse b _) -> b - data APIHandler a = AHGet !(Handler APIResp) | AHPost !(a -> Handler APIResp) @@ -191,22 +187,6 @@ buildQCtx = do sqlGenCtx <- scSQLGenCtx . 
hcServerCtx <$> ask return $ QCtx userInfo cache sqlGenCtx -logResult - :: (MonadIO m) - => L.Logger - -> Maybe UserInfo - -> RequestId - -> Wai.Request - -> Maybe Value - -> Either QErr BL.ByteString - -> Maybe (UTCTime, UTCTime) - -> m () -logResult logger userInfoM reqId httpReq req res qTime = do - let logline = case res of - Right res' -> mkHttpAccessLog userInfoM reqId httpReq res' qTime - Left e -> mkHttpErrorLog userInfoM reqId httpReq e req qTime - liftIO $ L.unLogger logger logline - logSuccess :: (MonadIO m) => L.Logger @@ -215,9 +195,11 @@ logSuccess -> Wai.Request -> BL.ByteString -> Maybe (UTCTime, UTCTime) + -> Maybe CompressionType -> m () -logSuccess logger userInfoM reqId httpReq res qTime = - liftIO $ L.unLogger logger $ mkHttpAccessLog userInfoM reqId httpReq res qTime +logSuccess logger userInfoM reqId httpReq res qTime cType = + liftIO $ L.unLogger logger $ + mkHttpAccessLog userInfoM reqId httpReq res qTime cType logError :: (MonadIO m) @@ -228,7 +210,8 @@ logError -> Maybe Value -> QErr -> m () logError logger userInfoM reqId httpReq req qErr = - liftIO $ L.unLogger logger $ mkHttpErrorLog userInfoM reqId httpReq qErr req Nothing + liftIO $ L.unLogger logger $ + mkHttpErrorLog userInfoM reqId httpReq qErr req Nothing Nothing mkSpockAction :: (MonadIO m, FromJSON a, ToJSON a) @@ -287,18 +270,25 @@ mkSpockAction qErrEncoder qErrModifier serverCtx apiHandler = do setStatus $ qeStatus qErr json $ qErrEncoder includeInternal qErr - logSuccessAndResp userInfo reqId req result qTime = do - logSuccess logger userInfo reqId req (apiRespToLBS result) qTime + logSuccessAndResp userInfo reqId req result qTime = case result of - JSONResp (HttpResponse j h) -> do - uncurry setHeader jsonHeader - uncurry setHeader (requestIdHeader, unRequestId reqId) - mapM_ (mapM_ (uncurry setHeader . unHeader)) h - lazyBytes $ encJToLBS j - RawResp (HttpResponse b h) -> do - uncurry setHeader (requestIdHeader, unRequestId reqId) - mapM_ (mapM_ (uncurry setHeader . 
unHeader)) h - lazyBytes b + JSONResp (HttpResponse encJson h) -> + possiblyCompressedLazyBytes userInfo reqId req qTime (encJToLBS encJson) $ + pure jsonHeader <> mkHeaders h + RawResp (HttpResponse rawBytes h) -> + possiblyCompressedLazyBytes userInfo reqId req qTime rawBytes $ mkHeaders h + + possiblyCompressedLazyBytes userInfo reqId req qTime respBytes respHeaders = do + let (compressedResp, mEncodingHeader, mCompressionType) = + compressResponse (requestHeaders req) respBytes + encodingHeader = maybe [] pure mEncodingHeader + reqIdHeader = (requestIdHeader, unRequestId reqId) + allRespHeaders = pure reqIdHeader <> encodingHeader <> respHeaders + logSuccess logger userInfo reqId req compressedResp qTime mCompressionType + mapM_ (uncurry setHeader) allRespHeaders + lazyBytes compressedResp + + mkHeaders = maybe [] (map unHeader) v1QueryHandler :: RQLQuery -> Handler (HttpResponse EncJSON) v1QueryHandler query = do @@ -460,8 +450,9 @@ mkWaiApp -> S.HashSet API -> EL.LiveQueriesOptions -> IO HasuraApp -mkWaiApp isoLevel loggerCtx sqlGenCtx enableAL pool ci httpManager mode corsCfg - enableConsole consoleAssetsDir enableTelemetry instanceId apis lqOpts = do +mkWaiApp isoLevel loggerCtx sqlGenCtx enableAL pool ci httpManager mode + corsCfg enableConsole consoleAssetsDir enableTelemetry + instanceId apis lqOpts = do let pgExecCtx = PGExecCtx pool isoLevel pgExecCtxSer = PGExecCtx pool Q.Serializable diff --git a/server/src-lib/Hasura/Server/Compression.hs b/server/src-lib/Hasura/Server/Compression.hs new file mode 100644 index 00000000000..d35a91ba8bb --- /dev/null +++ b/server/src-lib/Hasura/Server/Compression.hs @@ -0,0 +1,47 @@ +module Hasura.Server.Compression + ( compressResponse + , CompressionType(..) + , compressionTypeToTxt + ) +where + +import Hasura.Prelude + +import Hasura.Server.Utils (brHeader, gzipHeader) + +import qualified Codec.Compression.Brotli as BR +import qualified Codec.Compression.GZip as GZ +import qualified Data.ByteString.Lazy as BL +import qualified Data.Text as T +import qualified Network.HTTP.Types.Header as NH + +data CompressionType + = CTGZip + | CTBrotli + deriving (Show, Eq) + +compressionTypeToTxt :: CompressionType -> T.Text +compressionTypeToTxt CTGZip = "gzip" +compressionTypeToTxt CTBrotli = "brotli" + +compressResponse + :: NH.RequestHeaders + -> BL.ByteString + -> (BL.ByteString, Maybe (Text, Text), Maybe CompressionType) +compressResponse reqHeaders unCompressedResp = + let compressionTypeM = getRequestedCompression reqHeaders + appendCompressionType (res, headerM) = (res, headerM, compressionTypeM) + in appendCompressionType $ case compressionTypeM of + Just CTBrotli -> (BR.compress unCompressedResp, Just brHeader) + Just CTGZip -> (GZ.compress unCompressedResp, Just gzipHeader) + Nothing -> (unCompressedResp, Nothing) + +getRequestedCompression :: NH.RequestHeaders -> Maybe CompressionType +getRequestedCompression reqHeaders + | "br" `elem` acceptEncodingVals = Just CTBrotli + | "gzip" `elem` acceptEncodingVals = Just CTGZip + | otherwise = Nothing + where + acceptEncodingVals = concatMap (splitHeaderVal . 
snd) $ + filter (\h -> fst h == NH.hAcceptEncoding) reqHeaders + splitHeaderVal bs = map T.strip $ T.splitOn "," $ bsToTxt bs diff --git a/server/src-lib/Hasura/Server/Init.hs b/server/src-lib/Hasura/Server/Init.hs index 3e1eb3d6740..24a89509a45 100644 --- a/server/src-lib/Hasura/Server/Init.hs +++ b/server/src-lib/Hasura/Server/Init.hs @@ -433,6 +433,9 @@ serveCmdFooter = , [ "# Start GraphQL Engine with telemetry enabled/disabled" , "graphql-engine --database-url serve --enable-telemetry true|false" ] + , [ "# Start GraphQL Engine with HTTP compression enabled for '/v1/query' and '/v1/graphql' endpoints" + , "graphql-engine --database-url serve --enable-compression" + ] ] envVarDoc = mkEnvVarDoc $ envVars <> eventEnvs @@ -549,7 +552,7 @@ corsDomainEnv = enableConsoleEnv :: (String, String) enableConsoleEnv = ( "HASURA_GRAPHQL_ENABLE_CONSOLE" - , "Enable API Console" + , "Enable API Console (default: false)" ) enableTelemetryEnv :: (String, String) diff --git a/server/src-lib/Hasura/Server/Logging.hs b/server/src-lib/Hasura/Server/Logging.hs index d52d900e062..3e499f2dcb3 100644 --- a/server/src-lib/Hasura/Server/Logging.hs +++ b/server/src-lib/Hasura/Server/Logging.hs @@ -14,29 +14,30 @@ module Hasura.Server.Logging import Data.Aeson import Data.Aeson.Casing import Data.Aeson.TH -import Data.Bits (shift, (.&.)) -import Data.ByteString.Char8 (ByteString) -import Data.Int (Int64) -import Data.List (find) +import Data.Bits (shift, (.&.)) +import Data.ByteString.Char8 (ByteString) +import Data.Int (Int64) +import Data.List (find) import Data.Time.Clock -import Data.Word (Word32) -import Network.Socket (SockAddr (..)) -import System.ByteOrder (ByteOrder (..), byteOrder) -import Text.Printf (printf) +import Data.Word (Word32) +import Network.Socket (SockAddr (..)) +import System.ByteOrder (ByteOrder (..), byteOrder) +import Text.Printf (printf) -import qualified Data.ByteString.Char8 as BS -import qualified Data.ByteString.Lazy as BL -import qualified Data.Text as T -import qualified Network.HTTP.Types as N -import qualified Network.Wai as Wai +import qualified Data.ByteString.Char8 as BS +import qualified Data.ByteString.Lazy as BL +import qualified Data.Text as T +import qualified Network.HTTP.Types as N +import qualified Network.Wai as Wai import Hasura.HTTP -import Hasura.Logging (EngineLogType (..)) +import Hasura.Logging (EngineLogType (..)) import Hasura.Prelude import Hasura.RQL.Types +import Hasura.Server.Compression import Hasura.Server.Utils -import qualified Hasura.Logging as L +import qualified Hasura.Logging as L data StartupLog = StartupLog @@ -124,15 +125,17 @@ data HttpInfoLog , hlSource :: !T.Text , hlPath :: !T.Text , hlHttpVersion :: !N.HttpVersion + , hlCompression :: !(Maybe CompressionType) } deriving (Show, Eq) instance ToJSON HttpInfoLog where - toJSON (HttpInfoLog st met src path hv) = + toJSON (HttpInfoLog st met src path hv compressTypeM) = object [ "status" .= N.statusCode st , "method" .= met , "ip" .= src , "url" .= path , "http_version" .= show hv + , "content_encoding" .= (compressionTypeToTxt <$> compressTypeM) ] -- | Information about a GraphQL/Hasura metadata operation over HTTP @@ -170,14 +173,16 @@ mkHttpAccessLog -> Wai.Request -> BL.ByteString -> Maybe (UTCTime, UTCTime) + -> Maybe CompressionType -> HttpLog -mkHttpAccessLog userInfoM reqId req res mTimeT = +mkHttpAccessLog userInfoM reqId req res mTimeT compressTypeM = let http = HttpInfoLog { hlStatus = status , hlMethod = bsToTxt $ Wai.requestMethod req , hlSource = bsToTxt $ getSourceFromFallback req 
, hlPath = bsToTxt $ Wai.rawPathInfo req , hlHttpVersion = Wai.httpVersion req + , hlCompression = compressTypeM } op = OperationLog { olRequestId = reqId @@ -200,14 +205,16 @@ mkHttpErrorLog -> QErr -> Maybe Value -> Maybe (UTCTime, UTCTime) + -> Maybe CompressionType -> HttpLog -mkHttpErrorLog userInfoM reqId req err query mTimeT = +mkHttpErrorLog userInfoM reqId req err query mTimeT compressTypeM = let http = HttpInfoLog { hlStatus = status , hlMethod = bsToTxt $ Wai.requestMethod req , hlSource = bsToTxt $ getSourceFromFallback req , hlPath = bsToTxt $ Wai.rawPathInfo req , hlHttpVersion = Wai.httpVersion req + , hlCompression = compressTypeM } op = OperationLog { olRequestId = reqId diff --git a/server/src-lib/Hasura/Server/Utils.hs b/server/src-lib/Hasura/Server/Utils.hs index 990fe017c3a..c85fc37b72a 100644 --- a/server/src-lib/Hasura/Server/Utils.hs +++ b/server/src-lib/Hasura/Server/Utils.hs @@ -43,6 +43,9 @@ htmlHeader = ("Content-Type", "text/html; charset=utf-8") gzipHeader :: (T.Text, T.Text) gzipHeader = ("Content-Encoding", "gzip") +brHeader :: (T.Text, T.Text) +brHeader = ("Content-Encoding", "br") + userRoleHeader :: T.Text userRoleHeader = "x-hasura-role" diff --git a/server/stack.yaml b/server/stack.yaml index 90a4a38c714..ca9a0bfca6b 100644 --- a/server/stack.yaml +++ b/server/stack.yaml @@ -34,6 +34,8 @@ extra-deps: - Spock-core-0.13.0.0 - monad-validate-1.2.0.0 +- brotli-0.0.0.0 + # Override default flag values for local packages and extra-deps flags: {} @@ -57,3 +59,7 @@ extra-package-dbs: [] # Allow a newer minor version of GHC than the snapshot specifies # compiler-check: newer-minor + +docker: + enable: false + repo: 'hasura/stack-build' diff --git a/server/stack.yaml.lock b/server/stack.yaml.lock index f3d6893d3dd..b96d6052e3c 100644 --- a/server/stack.yaml.lock +++ b/server/stack.yaml.lock @@ -102,6 +102,13 @@ packages: sha256: 8e049bd12ce2bd470909578f2ee8eb80b89d5ff88860afa30e29dd4eafecfa3e original: hackage: monad-validate-1.2.0.0 +- completed: + hackage: brotli-0.0.0.0@sha256:0a8232f028dbc6a1f9db291ef996a5abe74aa00c7c3dc00a741c41f3da75a4dc,2873 + pantry-tree: + size: 407 + sha256: f4c2e742f10ca010554aeb0037294f118be4f35228acca98c0df97e1093bca33 + original: + hackage: brotli-0.0.0.0 snapshots: - completed: size: 498167 diff --git a/server/tests-py/queries/compression/graphql_query.yaml b/server/tests-py/queries/compression/graphql_query.yaml new file mode 100644 index 00000000000..bf0ccbf657f --- /dev/null +++ b/server/tests-py/queries/compression/graphql_query.yaml @@ -0,0 +1,18 @@ +url: /v1/graphql +query: + query: | + query { + test { + id + name + } + } +response: + data: + test: + - id: 1 + name: Gzip + - id: 2 + name: Brotli + - id: 3 + name: Nothing diff --git a/server/tests-py/queries/compression/setup.yaml b/server/tests-py/queries/compression/setup.yaml new file mode 100644 index 00000000000..8d87f149233 --- /dev/null +++ b/server/tests-py/queries/compression/setup.yaml @@ -0,0 +1,11 @@ +type: bulk +args: +- type: run_sql + args: + sql: | + CREATE TABLE test (id serial primary key, name text); + INSERT INTO test (name) values ('Gzip'), ('Brotli'), ('Nothing'); +- type: track_table + args: + schema: public + name: test diff --git a/server/tests-py/queries/compression/teardown.yaml b/server/tests-py/queries/compression/teardown.yaml new file mode 100644 index 00000000000..a76f634c892 --- /dev/null +++ b/server/tests-py/queries/compression/teardown.yaml @@ -0,0 +1,7 @@ +type: bulk +args: +- type: run_sql + args: + sql: | + DROP TABLE test; + cascade: 
true diff --git a/server/tests-py/queries/compression/v1_query.yaml b/server/tests-py/queries/compression/v1_query.yaml new file mode 100644 index 00000000000..883b8b717d4 --- /dev/null +++ b/server/tests-py/queries/compression/v1_query.yaml @@ -0,0 +1,15 @@ +url: /v1/query +query: + type: select + args: + table: test + columns: + - id + - name +response: + - id: 1 + name: Gzip + - id: 2 + name: Brotli + - id: 3 + name: Nothing diff --git a/server/tests-py/requirements-top-level.txt b/server/tests-py/requirements-top-level.txt index 1e526c5eaa1..6b5ca6e6820 100644 --- a/server/tests-py/requirements-top-level.txt +++ b/server/tests-py/requirements-top-level.txt @@ -9,3 +9,4 @@ pyjwt >= 1.5.3 jsondiff cryptography graphene +brotlipy diff --git a/server/tests-py/requirements.txt b/server/tests-py/requirements.txt index add42c1effe..dea895c5238 100644 --- a/server/tests-py/requirements.txt +++ b/server/tests-py/requirements.txt @@ -33,3 +33,4 @@ urllib3==1.25.3 wcwidth==0.1.7 websocket-client==0.56.0 zipp==0.5.1 +brotlipy==0.7.0 diff --git a/server/tests-py/test_compression.py b/server/tests-py/test_compression.py new file mode 100644 index 00000000000..552b51432bb --- /dev/null +++ b/server/tests-py/test_compression.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python3 + +import pytest +import yaml +import jsondiff + +from super_classes import DefaultTestSelectQueries +from validate import json_ordered + +class TestCompression(DefaultTestSelectQueries): + + gzip_header = {'Accept-Encoding': 'gzip'} + brotli_header = {'Accept-Encoding': 'br'} + gzip_brotli_header = {'Accept-Encoding': 'gzip, br'} + + def _make_post(self, hge_ctx, u, q, h): + if hge_ctx.hge_key is not None: + h['X-Hasura-Admin-Secret'] = hge_ctx.hge_key + resp = hge_ctx.http.post( + hge_ctx.hge_url + u, + json=q, + headers=h + ) + return resp + + def _get_config(self, f): + with open(f) as c: + conf = yaml.safe_load(c) + return conf['url'], conf['query'], conf['response'] + + def _assert_status_code_200(self, resp): + assert resp.status_code == 200, resp.json() + + def _assert_encoding(self, headers, encoding): + assert 'Content-Encoding' in headers, headers + assert headers['Content-Encoding'] == encoding, headers + + def _assert_resp(self, resp, exp_resp): + json_resp = resp.json() + assert json_ordered(json_resp) == json_ordered(exp_resp), yaml.dump({ + 'response': json_resp, + 'expected': exp_resp, + 'diff': jsondiff.diff(exp_resp, json_resp) + }) + + def _assert_gzip(self, resp, exp_resp): + self._assert_status_code_200(resp) + self._assert_encoding(resp.headers, 'gzip') + self._assert_resp(resp, exp_resp) + + def _assert_brotli(self, resp, exp_resp): + self._assert_status_code_200(resp) + self._assert_encoding(resp.headers, 'br') + self._assert_resp(resp, exp_resp) + + def test_gzip_compression_graphql(self, hge_ctx): + url, q, exp_resp = self._get_config(self.dir() + '/graphql_query.yaml') + resp = self._make_post(hge_ctx, url, q, self.gzip_header) + self._assert_gzip(resp, exp_resp) + + def test_gzip_compression_v1_query(self, hge_ctx): + url, q, exp_resp = self._get_config(self.dir() + '/v1_query.yaml') + resp = self._make_post(hge_ctx, url, q, self.gzip_header) + self._assert_gzip(resp, exp_resp) + + def test_brotli_compression_graphql(self, hge_ctx): + url, q, exp_resp = self._get_config(self.dir() + '/graphql_query.yaml') + resp = self._make_post(hge_ctx, url, q, self.brotli_header) + self._assert_brotli(resp, exp_resp) + + def test_brotli_compression_v1_query(self, hge_ctx): + url, q, exp_resp = self._get_config(self.dir() + 
'/v1_query.yaml') + resp = self._make_post(hge_ctx, url, q, self.brotli_header) + self._assert_brotli(resp, exp_resp) + + + # If gzip and brotli encoding are requested the server prefers brotli + def test_gzip_brotli_graphql_query(self, hge_ctx): + url, q, exp_resp = self._get_config(self.dir() + '/graphql_query.yaml') + resp = self._make_post(hge_ctx, url, q, self.gzip_brotli_header) + self._assert_brotli(resp, exp_resp) + + def test_gzip_brotli_v1_query(self, hge_ctx): + url, q, exp_resp = self._get_config(self.dir() + '/v1_query.yaml') + resp = self._make_post(hge_ctx, url, q, self.gzip_brotli_header) + self._assert_brotli(resp, exp_resp) + + @classmethod + def dir(cls): + return 'queries/compression'
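
The test class above exercises the encoding matrix programmatically; the same negotiation documented in compression.rst can also be checked by hand with curl against a running instance. A sketch, assuming graphql-engine listens on localhost:8080 with no admin secret and the `test` table from setup.yaml is tracked (add an `X-Hasura-Admin-Secret` header if a secret is configured):

    $ curl -s -D - -o /dev/null http://localhost:8080/v1/graphql \
        -H 'Content-Type: application/json' \
        -H 'Accept-Encoding: gzip, br' \
        -d '{"query":"{ test { id name } }"}' | grep -i content-encoding
    Content-Encoding: br      # brotli wins when both encodings are offered

    $ curl -s -D - -o /dev/null http://localhost:8080/v1/graphql \
        -H 'Content-Type: application/json' \
        -H 'Accept-Encoding: gzip' \
        -d '{"query":"{ test { id name } }"}' | grep -i content-encoding
    Content-Encoding: gzip

When the client offers neither value, getRequestedCompression returns Nothing, so the body goes out uncompressed and no Content-Encoding header is set.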