Support Node v12.x and fix tests on macOS (#2455)

* Bump node-sass to version ^4.12.0

This avoids sass/node-sass#2632, which causes compilation failures on
Node v12.x.

* Do most of the work in /pg_dump in Haskell instead of shell

The shell version caused problems on non-Linux systems because it relied on sed's
non-POSIX -i option, which behaves differently in BSD sed (the sed shipped with
macOS) than in GNU sed.
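
For context: GNU sed accepts -i with no argument (sed -i 'expr' file), whereas BSD sed on macOS requires a suffix argument (sed -i '' 'expr' file), so one invocation cannot serve both. The snippet below is a minimal sketch, not the PGDump.hs code from this commit, of the portable approach taken here: capture pg_dump's output and filter lines in Haskell instead of rewriting a temp file in place with sed.

-- Minimal sketch (hypothetical file name Clean.hs): drop blank lines and
-- SQL comments from a dump read on stdin, instead of using sed -i.
import           Data.Char (isSpace)
import qualified Data.List as L

cleanDump :: String -> String
cleanDump = unlines . filter keep . lines
  where
    keep ln = not (all isSpace ln)          -- drop empty lines
           && not ("--" `L.isPrefixOf` ln)  -- drop SQL comments

main :: IO ()
main = interact cleanDump                   -- e.g. pg_dump mydb | runghc Clean.hs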
Alexis King 2019-07-03 03:37:13 -05:00 committed by Vamshi Surabhi
parent bdb415b386
commit fb3794c31a
5 changed files with 81 additions and 165 deletions

View File

@@ -9324,12 +9324,6 @@
"integrity": "sha1-soqmKIorn8ZRA1x3EfZathkDMaY=",
"dev": true
},
"lodash.clonedeep": {
"version": "4.5.0",
"resolved": "https://registry.npmjs.org/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz",
"integrity": "sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=",
"dev": true
},
"lodash.curry": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.curry/-/lodash.curry-4.1.1.tgz",
@@ -9432,12 +9426,6 @@
"integrity": "sha512-AOYza4+Hf5z1/0Hztxpm2/xiPZgi/cjMqdnKTUWTBSKchJlxXXuUSxCCl8rJlf4g6yww/j6mA8nC8Hw/EZWxKQ==",
"dev": true
},
"lodash.mergewith": {
"version": "4.6.1",
"resolved": "https://registry.npmjs.org/lodash.mergewith/-/lodash.mergewith-4.6.1.tgz",
"integrity": "sha512-eWw5r+PYICtEBgrBE5hhlT6aAa75f411bgDz/ZL2KZqYV03USvucsxcHUIlGTDTECs1eunpI7HOV7U+WLDvNdQ==",
"dev": true
},
"lodash.once": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz",
@@ -9994,7 +9982,8 @@
"nan": {
"version": "2.11.1",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.11.1.tgz",
"integrity": "sha512-iji6k87OSXa0CcrLl9z+ZiYSuR2o+c0bGuNmXdrhTQTakxytAFsC56SArGYoiHlJlFoHSnvmhpceZJaXkVuOtA=="
"integrity": "sha512-iji6k87OSXa0CcrLl9z+ZiYSuR2o+c0bGuNmXdrhTQTakxytAFsC56SArGYoiHlJlFoHSnvmhpceZJaXkVuOtA==",
"optional": true
},
"nanomatch": {
"version": "1.2.13",
@@ -10103,17 +10092,6 @@
"resolved": "https://registry.npmjs.org/semver/-/semver-5.3.0.tgz",
"integrity": "sha1-myzl094C0XxgEq0yaqa00M9U+U8=",
"dev": true
},
"tar": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/tar/-/tar-2.2.2.tgz",
"integrity": "sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA==",
"dev": true,
"requires": {
"block-stream": "*",
"fstream": "^1.0.12",
"inherits": "2"
}
}
}
},
@@ -10172,9 +10150,9 @@
}
},
"node-sass": {
"version": "4.9.3",
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-4.9.3.tgz",
"integrity": "sha512-XzXyGjO+84wxyH7fV6IwBOTrEBe2f0a6SBze9QWWYR/cL74AcQUks2AsqcCZenl/Fp/JVbuEaLpgrLtocwBUww==",
"version": "4.12.0",
"resolved": "https://registry.npmjs.org/node-sass/-/node-sass-4.12.0.tgz",
"integrity": "sha512-A1Iv4oN+Iel6EPv77/HddXErL2a+gZ4uBeZUy+a8O35CFYTXhgA8MgLCWBtwpGZdCvTvQ9d+bQxX/QC36GDPpQ==",
"dev": true,
"requires": {
"async-foreach": "^0.1.3",
@@ -10184,15 +10162,13 @@
"get-stdin": "^4.0.1",
"glob": "^7.0.3",
"in-publish": "^2.0.0",
"lodash.assign": "^4.2.0",
"lodash.clonedeep": "^4.3.2",
"lodash.mergewith": "^4.6.0",
"lodash": "^4.17.11",
"meow": "^3.7.0",
"mkdirp": "^0.5.1",
"nan": "^2.10.0",
"nan": "^2.13.2",
"node-gyp": "^3.8.0",
"npmlog": "^4.0.0",
"request": "2.87.0",
"request": "^2.88.0",
"sass-graph": "^2.2.4",
"stdout-stream": "^1.4.0",
"true-case-path": "^1.0.2"
@@ -10208,58 +10184,11 @@
"which": "^1.2.9"
}
},
"har-validator": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.0.3.tgz",
"integrity": "sha1-ukAsJmGU8VlW7xXg/PJCmT9qff0=",
"dev": true,
"requires": {
"ajv": "^5.1.0",
"har-schema": "^2.0.0"
}
},
"oauth-sign": {
"version": "0.8.2",
"resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.8.2.tgz",
"integrity": "sha1-Rqarfwrq2N6unsBWV4C31O/rnUM=",
"nan": {
"version": "2.14.0",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz",
"integrity": "sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==",
"dev": true
},
"request": {
"version": "2.87.0",
"resolved": "https://registry.npmjs.org/request/-/request-2.87.0.tgz",
"integrity": "sha512-fcogkm7Az5bsS6Sl0sibkbhcKsnyon/jV1kF3ajGmF0c8HrttdKTPRT9hieOaQHA5HEq6r8OyWOo/o781C1tNw==",
"dev": true,
"requires": {
"aws-sign2": "~0.7.0",
"aws4": "^1.6.0",
"caseless": "~0.12.0",
"combined-stream": "~1.0.5",
"extend": "~3.0.1",
"forever-agent": "~0.6.1",
"form-data": "~2.3.1",
"har-validator": "~5.0.3",
"http-signature": "~1.2.0",
"is-typedarray": "~1.0.0",
"isstream": "~0.1.2",
"json-stringify-safe": "~5.0.1",
"mime-types": "~2.1.17",
"oauth-sign": "~0.8.2",
"performance-now": "^2.1.0",
"qs": "~6.5.1",
"safe-buffer": "^5.1.1",
"tough-cookie": "~2.3.3",
"tunnel-agent": "^0.6.0",
"uuid": "^3.1.0"
}
},
"tough-cookie": {
"version": "2.3.4",
"resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.3.4.tgz",
"integrity": "sha512-TZ6TTfI5NtZnuyy/Kecv+CnoROnyXn2DN97LontgQpCwsX2XyLYCC0ENhYkehSOwAp8rTQKc/NUIF7BkQ5rKLA==",
"dev": true,
"requires": {
"punycode": "^1.4.1"
}
}
}
},
@@ -15398,6 +15327,17 @@
"integrity": "sha512-IlqtmLVaZA2qab8epUXbVWRn3aB1imbDMJtjB3nu4X0NqPkcY/JH9ZtCBWKHWPxs8Svi9tyo8w2dBoi07qZbBA==",
"dev": true
},
"tar": {
"version": "2.2.2",
"resolved": "https://registry.npmjs.org/tar/-/tar-2.2.2.tgz",
"integrity": "sha512-FCEhQ/4rE1zYv9rYXJw/msRqsnmlje5jHP6huWeBZ704jUTy02c5AZyWujpMR1ax6mVw9NyJMfuK2CMDWVIfgA==",
"dev": true,
"requires": {
"block-stream": "*",
"fstream": "^1.0.12",
"inherits": "2"
}
},
"test-exclude": {
"version": "5.1.0",
"resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.1.0.tgz",

View File

@@ -162,7 +162,7 @@
"less-loader": "^4.1.0",
"lint-staged": "^6.1.1",
"mini-css-extract-plugin": "^0.4.0",
"node-sass": "^4.9.2",
"node-sass": "^4.12.0",
"nyc": "^13.3.0",
"optimize-css-assets-webpack-plugin": "^4.0.2",
"react-a11y": "^0.2.6",

View File

@@ -3,18 +3,20 @@ module Hasura.Server.PGDump
, execPGDump
) where
import Control.Exception (IOException, try)
import Control.Exception (IOException, try)
import Data.Aeson.Casing
import Data.Aeson.TH
import qualified Data.ByteString.Lazy as BL
import qualified Data.FileEmbed as FE
import qualified Data.List as L
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Data.ByteString.Lazy as BL
import Data.Char (isSpace)
import qualified Data.List as L
import qualified Data.String.Conversions as CS
import qualified Data.Text as T
import qualified Database.PG.Query as Q
import Hasura.Prelude
import qualified Hasura.RQL.Types.Error as RTE
import qualified Hasura.RQL.Types.Error as RTE
import System.Exit
import System.Process
import qualified Text.Regex.TDFA as TDFA
data PGDumpReqBody =
PGDumpReqBody
@@ -24,30 +26,13 @@ data PGDumpReqBody =
$(deriveJSON (aesonDrop 3 snakeCase) ''PGDumpReqBody)
script :: IsString a => a
script = $(FE.embedStringFile "src-rsr/run_pg_dump.sh")
runScript
:: String
-> [String]
-> String
-> IO (Either String BL.ByteString)
runScript dbUrl opts clean = do
(exitCode, filename, stdErr) <- readProcessWithExitCode "/bin/sh"
["/dev/stdin", dbUrl, unwords opts, clean] script
case exitCode of
ExitSuccess -> do
contents <- BL.readFile $ L.dropWhileEnd (== '\n') filename
return $ Right contents
ExitFailure _ -> return $ Left stdErr
execPGDump
:: (MonadError RTE.QErr m, MonadIO m)
=> PGDumpReqBody
-> Q.ConnInfo
-> m BL.ByteString
execPGDump b ci = do
eOutput <- liftIO $ try $ runScript dbUrl opts clean
eOutput <- liftIO $ try execProcess
output <- either throwException return eOutput
case output of
Left err ->
@@ -57,11 +42,49 @@ execPGDump b ci = do
throwException :: (MonadError RTE.QErr m) => IOException -> m a
throwException _ = RTE.throw500 "internal exception while executing pg_dump"
-- FIXME(shahidhk): need to add connection options (Q.connOptions) too?
dbUrl = "postgres://" <> Q.connUser ci <> ":" <> Q.connPassword ci
<> "@" <> Q.connHost ci <> ":" <> show (Q.connPort ci)
<> "/" <> Q.connDatabase ci
opts = prbOpts b
clean = case prbCleanOutput b of
Just v -> show v
Nothing -> show False
execProcess = do
(exitCode, stdOut, stdErr) <- readProcessWithExitCode "pg_dump" opts ""
return $ case exitCode of
ExitSuccess -> Right $ CS.cs (clean stdOut)
ExitFailure _ -> Left $ CS.cs stdErr
opts = Q.pgConnString ci : "--encoding=utf8" : prbOpts b
clean str
| fromMaybe False (prbCleanOutput b) =
unlines $ filter (not . shouldDropLine) (lines str)
| otherwise = str
shouldDropLine line =
-- delete empty lines
all isSpace line
-- delete comments
|| "--" `L.isPrefixOf` line
-- delete front matter
|| line `elem` preambleLines
-- delete notify triggers
|| notifyTriggerRegex `TDFA.match` line
preambleLines =
[ "SET statement_timeout = 0;"
, "SET lock_timeout = 0;"
, "SET idle_in_transaction_session_timeout = 0;"
, "SET client_encoding = 'UTF8';"
, "SET standard_conforming_strings = on;"
, "SELECT pg_catalog.set_config('search_path', '', false);"
, "SET check_function_bodies = false;"
, "SET xmloption = content;"
, "SET client_min_messages = warning;"
, "SET row_security = off;"
, "SET default_tablespace = '';"
, "SET default_with_oids = false;"
, "CREATE SCHEMA public;"
, "COMMENT ON SCHEMA public IS 'standard public schema';"
]
notifyTriggerRegex =
let regexStr :: String =
"^CREATE TRIGGER \"?notify_hasura_.+\"? AFTER [[:alnum:]]+ "
<> "ON .+ FOR EACH ROW EXECUTE PROCEDURE "
<> "\"?hdb_views\"?\\.\"?notify_hasura_.+\"?\\(\\);$"
in TDFA.makeRegex regexStr :: TDFA.Regex
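
For reference, the following standalone snippet (trigger and table names are hypothetical; the pattern string is the one from the diff above) checks that a typical notify-trigger line is matched:

import qualified Text.Regex.TDFA as TDFA

-- A hypothetical pg_dump output line for a Hasura notify trigger.
sampleLine :: String
sampleLine =
  "CREATE TRIGGER \"notify_hasura_user_INSERT\" AFTER INSERT ON public.\"user\" "
  <> "FOR EACH ROW EXECUTE PROCEDURE hdb_views.\"notify_hasura_user_INSERT\"();"

notifyTriggerRegex :: TDFA.Regex
notifyTriggerRegex = TDFA.makeRegex
  ("^CREATE TRIGGER \"?notify_hasura_.+\"? AFTER [[:alnum:]]+ "
   <> "ON .+ FOR EACH ROW EXECUTE PROCEDURE "
   <> "\"?hdb_views\"?\\.\"?notify_hasura_.+\"?\\(\\);$" :: String)

main :: IO ()
main = print (notifyTriggerRegex `TDFA.match` sampleLine :: Bool)  -- prints True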

View File

@@ -1,47 +0,0 @@
#! /usr/bin/env sh
set -e
filename=/tmp/pg_dump-$(date +%s).sql
template_file=/tmp/hasura_del_lines_template.txt
# input args
DB_URL=$1
OPTS=$2
CLEAN=$3
pg_dump "$DB_URL" $OPTS -f "$filename"
# clean the file if the variable is True
if [ "$CLEAN" = "True" ]; then
# delete all comments
sed -i '/^--/d' "$filename"
# delete front matter
cat > $template_file << EOF
SET statement_timeout = 0;
SET lock_timeout = 0;
SET idle_in_transaction_session_timeout = 0;
SET client_encoding = 'UTF8';
SET standard_conforming_strings = on;
SELECT pg_catalog.set_config('search_path', '', false);
SET check_function_bodies = false;
SET client_min_messages = warning;
SET row_security = off;
SET default_tablespace = '';
SET default_with_oids = false;
CREATE SCHEMA public;
COMMENT ON SCHEMA public IS 'standard public schema';
EOF
while read -r line; do
sed -i '/^'"$line"'$/d' "$filename"
done < $template_file
# delete notify triggers
sed -i -E '/^CREATE TRIGGER "?notify_hasura_.+"? AFTER \w+ ON .+ FOR EACH ROW EXECUTE PROCEDURE "?hdb_views"?\."?notify_hasura_.+"?\(\);$/d' "$filename"
# delete empty lines
sed -i '/^[[:space:]]*$/d' "$filename"
fi
printf "%s" "$filename"

View File

@@ -13,7 +13,7 @@ packages:
# Packages to be pulled from upstream that are not in the resolver (e.g., acme-missiles-0.3)
extra-deps:
- git: https://github.com/hasura/pg-client-hs.git
commit: 85f9c2c15e4fa09f2e2a86dbb23149b5256bdd34
commit: 3f905a5fa57e2898dbb733a777cadb80990b3d45
- git: https://github.com/hasura/graphql-parser-hs.git
commit: 39d175c5c7bca35ec04d13c92a39f600bd6413cf
- git: https://github.com/hasura/ci-info-hs.git