server: Upgrade the HGE server to GHC 9.4.5

Upgrade to GHC 9.4.5, and update any tests.

PR-URL: https://github.com/hasura/graphql-engine-mono/pull/8954
Co-authored-by: Mohd Bilal <24944223+m-Bilal@users.noreply.github.com>
Co-authored-by: Samir Talwar <47582+SamirTalwar@users.noreply.github.com>
Co-authored-by: Philip Lykke Carlsen <358550+plcplc@users.noreply.github.com>
GitOrigin-RevId: 5261126777cb478567ea471c4bf5441bc345ea0d
Brandon Simmons 2023-06-06 09:27:46 -04:00 committed by hasura-bot
parent a0b2f29b93
commit 440a8664de
50 changed files with 306 additions and 209 deletions

View File

@ -1 +1 @@
9.2.5 9.4.5

.gitignore vendored
View File

@ -65,3 +65,7 @@ test_runs/
# dependencies # dependencies
node_modules node_modules
# ticky profiles, e.g. from dev.sh --prof-ticky
*.ticky
*.ticky.modules

View File

@ -16,8 +16,15 @@
-- --
-- See: https://www.haskell.org/cabal/users-guide/nix-local-build.html#configuring-builds-with-cabal-project -- See: https://www.haskell.org/cabal/users-guide/nix-local-build.html#configuring-builds-with-cabal-project
with-compiler: ghc-9.2.5 with-compiler: ghc-9.4.5
-- TODO WHEN WE NEXT UPGRADE GHC: move `-split-sections/--gc-sections` from ci.project.local into this file -- Work around bugs not yet fixed in GHC 9.4.5. The affected optimizations are only
-- enabled at -O2, which we don't currently use, but we disable them defensively anyway.
-- https://gitlab.haskell.org/ghc/ghc/-/merge_requests/10282
package *
ghc-options:
-fno-dicts-strict
-fno-spec-constr
-- package-level parallelism: -- package-level parallelism:
jobs: $ncpus jobs: $ncpus
@ -26,6 +33,32 @@ packages: server
packages: server/lib/*/*.cabal packages: server/lib/*/*.cabal
packages: server/forks/*/*.cabal packages: server/forks/*/*.cabal
-- TODO remove these when we are able:
allow-newer: ekg-core:base
allow-newer: ekg-core:ghc-prim
allow-newer: ekg-core:inspection-testing
allow-newer: ekg-core:text
allow-newer: ekg-json:base
allow-newer: ekg-json:text
allow-newer: ekg-prometheus:base
allow-newer: ekg-prometheus:text
allow-newer: ekg-prometheus:bytestring
-- Migrating to 0.25+ looks like it will be a real pain... :(
-- https://github.com/morpheusgraphql/morpheus-graphql/pull/766
allow-newer: morpheus-graphql:text
allow-newer: morpheus-graphql-app:text
allow-newer: morpheus-graphql-code-gen:text
allow-newer: morpheus-graphql-code-gen-utils:text
allow-newer: morpheus-graphql-core:text
allow-newer: morpheus-graphql-server:text
allow-newer: morpheus-graphql-client:text
allow-newer: morpheus-graphql-subscriptions:text
-- https://gitlab.haskell.org/ghc/ghc-debug/-/merge_requests/27
allow-newer: ghc-debug-stub:ghc-prim
-- https://github.com/mokus0/th-extras/pull/20
allow-newer: th-extras:template-haskell
package * package *
-- NOTE: this gets applied to both local (hasura) packages and dependencies, -- NOTE: this gets applied to both local (hasura) packages and dependencies,
-- but optimizations for local hasura packages are overridden/controlled -- but optimizations for local hasura packages are overridden/controlled
@ -39,6 +72,14 @@ package *
haddock-hyperlink-source: true haddock-hyperlink-source: true
haddock-quickjump: true haddock-quickjump: true
haddock-internal: true haddock-internal: true
-- -----------------------------------------------------------
-- Allow for dead-code elimination at link-time, to reduce binary size
ghc-options: -split-sections
if(os(linux))
package *
-- ld on M1 OSX does not recognise this:
ld-options: -Wl,--gc-sections
-- -----------------------------------------------------------
package graphql-engine package graphql-engine
ghc-options: -j ghc-options: -j
@ -53,10 +94,12 @@ source-repository-package
tag: c8fd863d359876af3a0fce47db8a5393dc556667 tag: c8fd863d359876af3a0fce47db8a5393dc556667
-- This is an unreleased version of "odbc" -- This is an unreleased version of "odbc"
-- For text v2 support
-- WIP/FIXME: review change, maybe upstream PR
source-repository-package source-repository-package
type: git type: git
location: https://github.com/fpco/odbc.git location: https://github.com/hasura/odbc.git
tag: 38e04349fe28a91f189e44bd7783220956d18aae tag: 662268ba4d968a23e67dfd13b1ec9d4d90481493
package odbc package odbc
ghc-options: -Wwarn ghc-options: -Wwarn
@ -81,12 +124,10 @@ source-repository-package
location: https://github.com/hasura/ekg-json.git location: https://github.com/hasura/ekg-json.git
tag: 1fab6837e1dd98317a3c2f5bf9deedf4dfcac51b tag: 1fab6837e1dd98317a3c2f5bf9deedf4dfcac51b
-- This is v1.2.5.0 with https://github.com/haskell/text/pull/348 -- because we need 27d87f01, not yet released
-- cherry-picked.
-- These changes are in version 2.0, but hopefully they can get backported:
-- https://github.com/haskell/text/issues/452
source-repository-package source-repository-package
type: git type: git
location: https://github.com/hasura/text.git location: https://github.com/snoyberg/yaml.git
tag: ba0fd2bf256c996a6c85dbdc8590a6fcde41b8f8 tag: c1aa7b3991e669e4c6a977712b495d40a54cf819
subdir: yaml

View File

@ -1,6 +1,8 @@
active-repositories: hackage.haskell.org:merge active-repositories: hackage.haskell.org:merge
constraints: any.Cabal ==3.6.3.0, constraints: any.Cabal ==3.8.1.0,
any.Cabal-syntax ==3.8.1.0,
any.Diff ==0.4.1, any.Diff ==0.4.1,
any.Glob ==0.10.2,
any.HTTP ==4000.4.1, any.HTTP ==4000.4.1,
any.HUnit ==1.6.2.0, any.HUnit ==1.6.2.0,
any.MonadRandom ==0.6, any.MonadRandom ==0.6,
@ -13,7 +15,7 @@ constraints: any.Cabal ==3.6.3.0,
any.Spock-core ==0.14.0.1, any.Spock-core ==0.14.0.1,
any.StateVar ==1.2.2, any.StateVar ==1.2.2,
any.adjunctions ==4.4.2, any.adjunctions ==4.4.2,
any.aeson ==2.1.0.0, any.aeson ==2.1.2.1,
any.aeson-casing ==0.2.0.0, any.aeson-casing ==0.2.0.0,
any.aeson-optics ==1.2.0.1, any.aeson-optics ==1.2.0.1,
any.aeson-pretty ==0.8.9, any.aeson-pretty ==0.8.9,
@ -36,7 +38,7 @@ constraints: any.Cabal ==3.6.3.0,
any.autodocodec ==0.2.0.3, any.autodocodec ==0.2.0.3,
any.autodocodec-openapi3 ==0.2.1.1, any.autodocodec-openapi3 ==0.2.1.1,
any.barbies ==2.0.3.1, any.barbies ==2.0.3.1,
any.base ==4.16.4.0, any.base ==4.17.1.0,
any.base-compat ==0.12.2, any.base-compat ==0.12.2,
any.base-compat-batteries ==0.12.2, any.base-compat-batteries ==0.12.2,
any.base-orphans ==0.8.7, any.base-orphans ==0.8.7,
@ -46,7 +48,7 @@ constraints: any.Cabal ==3.6.3.0,
any.basement ==0.0.15, any.basement ==0.0.15,
any.bifunctors ==5.5.13, any.bifunctors ==5.5.13,
any.bimap ==0.5.0, any.bimap ==0.5.0,
any.binary ==0.8.9.0, any.binary ==0.8.9.1,
any.binary-parser ==0.5.7.2, any.binary-parser ==0.5.7.2,
any.bitvec ==1.1.3.0, any.bitvec ==1.1.3.0,
any.blaze-builder ==0.4.2.2, any.blaze-builder ==0.4.2.2,
@ -58,7 +60,7 @@ constraints: any.Cabal ==3.6.3.0,
any.bson ==0.4.0.1, any.bson ==0.4.0.1,
any.byteable ==0.1.1, any.byteable ==0.1.1,
any.byteorder ==1.0.4, any.byteorder ==1.0.4,
any.bytestring ==0.11.3.1, any.bytestring ==0.11.4.0,
any.bytestring-builder ==0.10.8.2.0, any.bytestring-builder ==0.10.8.2.0,
any.bytestring-lexing ==0.5.0.9, any.bytestring-lexing ==0.5.0.9,
any.bytestring-strict-builder ==0.4.5.6, any.bytestring-strict-builder ==0.4.5.6,
@ -81,7 +83,7 @@ constraints: any.Cabal ==3.6.3.0,
any.connection ==0.3.1, any.connection ==0.3.1,
any.constraints ==0.13.4, any.constraints ==0.13.4,
any.constraints-extras ==0.3.2.1, any.constraints-extras ==0.3.2.1,
any.containers ==0.6.5.1, any.containers ==0.6.7,
any.contravariant ==1.5.5, any.contravariant ==1.5.5,
any.contravariant-extras ==0.3.5.3, any.contravariant-extras ==0.3.5.3,
any.cookie ==0.4.5, any.cookie ==0.4.5,
@ -109,17 +111,17 @@ constraints: any.Cabal ==3.6.3.0,
any.data-serializer ==0.3.5, any.data-serializer ==0.3.5,
any.data-textual ==0.3.0.3, any.data-textual ==0.3.0.3,
any.dec ==0.0.5, any.dec ==0.0.5,
any.deepseq ==1.4.6.1, any.deepseq ==1.4.8.0,
any.deferred-folds ==0.9.18.2, any.deferred-folds ==0.9.18.2,
any.dependent-map ==0.4.0.0, any.dependent-map ==0.4.0.0,
any.dependent-sum ==0.7.1.0, any.dependent-sum ==0.6.2.0,
any.dependent-sum-template ==0.1.1.1, any.dependent-sum-template ==0.1.1.1,
any.directory ==1.3.6.2, any.directory ==1.3.7.1,
any.distributive ==0.6.2.1, any.distributive ==0.6.2.1,
any.dlist ==1.0, any.dlist ==1.0,
any.dns ==4.1.0, any.dns ==4.1.0,
any.doctest ==0.21.1, any.doctest ==0.21.1,
any.double-conversion ==2.0.4.1, any.double-conversion ==2.0.4.2,
any.easy-file ==0.2.2, any.easy-file ==0.2.2,
any.either ==5.0.2, any.either ==5.0.2,
any.ekg-core ==0.1.1.7, any.ekg-core ==0.1.1.7,
@ -128,35 +130,35 @@ constraints: any.Cabal ==3.6.3.0,
any.entropy ==0.4.1.10, any.entropy ==0.4.1.10,
any.erf ==2.0.0.0, any.erf ==2.0.0.0,
any.errors ==2.3.0, any.errors ==2.3.0,
any.exceptions ==0.10.4, any.exceptions ==0.10.5,
any.extensible-exceptions ==0.1.1.4, any.extensible-exceptions ==0.1.1.4,
any.extra ==1.7.12, any.extra ==1.7.12,
any.fail ==4.9.0.0, any.fail ==4.9.0.0,
any.fast-logger ==3.1.1, any.fast-logger ==3.1.2,
any.file-embed ==0.0.15.0, any.file-embed ==0.0.15.0,
any.filepath ==1.4.2.2, any.filepath ==1.4.2.2,
any.flush-queue ==1.0.0, any.flush-queue ==1.0.0,
any.focus ==1.0.3, any.focus ==1.0.3,
any.fold-debounce ==0.2.0.10, any.fold-debounce ==0.2.0.11,
any.foldl ==1.4.12, any.foldl ==1.4.12,
any.formatting ==7.1.3, any.formatting ==7.2.0,
any.free ==5.1.9, any.free ==5.1.9,
any.generic-lens ==2.2.1.0, any.generic-lens ==2.2.1.0,
any.generic-lens-core ==2.2.1.0, any.generic-lens-core ==2.2.1.0,
any.generic-monoid ==0.1.0.1, any.generic-monoid ==0.1.0.1,
any.generically ==0.1, any.generically ==0.1,
any.generics-sop ==0.5.1.2, any.generics-sop ==0.5.1.2,
any.ghc ==9.2.5, any.ghc ==9.4.5,
any.ghc-bignum ==1.2, any.ghc-bignum ==1.3,
any.ghc-boot ==9.2.5, any.ghc-boot ==9.4.5,
any.ghc-boot-th ==9.2.5, any.ghc-boot-th ==9.4.5,
any.ghc-debug-convention ==0.4.0.0, any.ghc-debug-convention ==0.4.0.0,
any.ghc-debug-stub ==0.4.0.0, any.ghc-debug-stub ==0.4.0.0,
any.ghc-heap ==9.2.5, any.ghc-heap ==9.4.5,
any.ghc-heap-view ==0.6.3, any.ghc-heap-view ==0.6.4,
any.ghc-paths ==0.1.0.12, any.ghc-paths ==0.1.0.12,
any.ghc-prim ==0.8.0, any.ghc-prim ==0.9.0,
any.ghci ==9.2.5, any.ghci ==9.4.5,
any.happy ==1.20.0, any.happy ==1.20.0,
any.hashable ==1.4.1.0, any.hashable ==1.4.1.0,
any.hashtables ==1.3.1, any.hashtables ==1.3.1,
@ -173,9 +175,9 @@ constraints: any.Cabal ==3.6.3.0,
any.hpc ==0.6.1.0, any.hpc ==0.6.1.0,
any.hs-opentelemetry-otlp ==0.0.1.0, any.hs-opentelemetry-otlp ==0.0.1.0,
any.hsc2hs ==0.68.8, any.hsc2hs ==0.68.8,
any.hspec ==2.10.6, any.hspec ==2.10.10,
any.hspec-core ==2.10.6, any.hspec-core ==2.10.10,
any.hspec-discover ==2.10.6, any.hspec-discover ==2.10.10,
any.hspec-expectations ==0.8.2, any.hspec-expectations ==0.8.2,
any.hspec-expectations-json ==1.0.0.7, any.hspec-expectations-json ==1.0.0.7,
any.hspec-expectations-lifted ==0.10.0, any.hspec-expectations-lifted ==0.10.0,
@ -194,10 +196,10 @@ constraints: any.Cabal ==3.6.3.0,
any.indexed-traversable ==0.1.2, any.indexed-traversable ==0.1.2,
any.indexed-traversable-instances ==0.1.1.1, any.indexed-traversable-instances ==0.1.1.1,
any.insert-ordered-containers ==0.2.5.1, any.insert-ordered-containers ==0.2.5.1,
any.inspection-testing ==0.4.6.1, any.inspection-testing ==0.5.0.1,
any.integer-gmp ==1.1, any.integer-gmp ==1.1,
any.integer-logarithms ==1.0.3.1, any.integer-logarithms ==1.0.3.1,
any.invariant ==0.6, any.invariant ==0.6.1,
any.iproute ==1.7.12, any.iproute ==1.7.12,
any.iso8601-time ==0.1.5, any.iso8601-time ==0.1.5,
any.isomorphism-class ==0.1.0.7, any.isomorphism-class ==0.1.0.7,
@ -208,7 +210,7 @@ constraints: any.Cabal ==3.6.3.0,
any.keys ==3.12.3, any.keys ==3.12.3,
any.kriti-lang ==0.3.3, any.kriti-lang ==0.3.3,
any.launchdarkly-server-sdk ==4.0.0, any.launchdarkly-server-sdk ==4.0.0,
any.lens ==5.2, any.lens ==5.2.2,
any.lens-aeson ==1.2.2, any.lens-aeson ==1.2.2,
any.lens-family ==2.1.2, any.lens-family ==2.1.2,
any.lens-family-core ==2.1.2, any.lens-family-core ==2.1.2,
@ -227,6 +229,7 @@ constraints: any.Cabal ==3.6.3.0,
any.microlens-th ==0.4.3.10, any.microlens-th ==0.4.3.10,
any.mime-types ==0.1.1.0, any.mime-types ==0.1.1.0,
any.mmorph ==1.2.0, any.mmorph ==1.2.0,
any.modern-uri ==0.3.6.0,
any.monad-control ==1.0.3.1, any.monad-control ==1.0.3.1,
any.monad-logger ==0.3.37, any.monad-logger ==0.3.37,
any.monad-loops ==0.4.3, any.monad-loops ==0.4.3,
@ -234,10 +237,14 @@ constraints: any.Cabal ==3.6.3.0,
any.monad-validate ==1.2.0.1, any.monad-validate ==1.2.0.1,
any.mongoDB ==2.7.1.2, any.mongoDB ==2.7.1.2,
any.mono-traversable ==1.0.15.3, any.mono-traversable ==1.0.15.3,
any.morpheus-graphql ==0.20.0, any.morpheus-graphql ==0.24.3,
any.morpheus-graphql-app ==0.20.0, any.morpheus-graphql-app ==0.24.3,
any.morpheus-graphql-code-gen ==0.20.0, any.morpheus-graphql-client ==0.24.3,
any.morpheus-graphql-core ==0.20.0, any.morpheus-graphql-code-gen ==0.24.3,
any.morpheus-graphql-code-gen-utils ==0.24.3,
any.morpheus-graphql-core ==0.24.3,
any.morpheus-graphql-server ==0.24.3,
any.morpheus-graphql-subscriptions ==0.24.3,
any.mtl ==2.2.2, any.mtl ==2.2.2,
any.mtl-compat ==0.2.2, any.mtl-compat ==0.2.2,
any.mustache ==2.4.1, any.mustache ==2.4.1,
@ -251,7 +258,7 @@ constraints: any.Cabal ==3.6.3.0,
any.nonce ==1.0.7, any.nonce ==1.0.7,
any.nonempty-containers ==0.3.4.4, any.nonempty-containers ==0.3.4.4,
any.nonempty-vector ==0.2.1.0, any.nonempty-vector ==0.2.1.0,
any.odbc ==0.2.6, any.odbc ==0.2.7,
any.old-locale ==1.0.0.7, any.old-locale ==1.0.0.7,
any.old-time ==1.1.0.3, any.old-time ==1.1.0.3,
any.openapi3 ==3.2.2, any.openapi3 ==3.2.2,
@ -296,7 +303,8 @@ constraints: any.Cabal ==3.6.3.0,
any.regex-base ==0.94.0.2, any.regex-base ==0.94.0.2,
any.regex-posix ==0.96.0.1, any.regex-posix ==0.96.0.1,
any.regex-tdfa ==1.3.2, any.regex-tdfa ==1.3.2,
any.relude ==1.1.0.0, any.relude ==1.2.0.0,
any.req ==3.13.0,
any.reroute ==0.7.0.0, any.reroute ==0.7.0.0,
any.resourcet ==1.2.6, any.resourcet ==1.2.6,
any.retry ==0.9.3.0, any.retry ==0.9.3.0,
@ -320,11 +328,11 @@ constraints: any.Cabal ==3.6.3.0,
any.simple-sendfile ==0.2.30, any.simple-sendfile ==0.2.30,
any.singleton-bool ==0.1.6, any.singleton-bool ==0.1.6,
any.socks ==0.6.1, any.socks ==0.6.1,
any.some ==1.0.3, any.some ==1.0.5,
any.sop-core ==0.5.0.2, any.sop-core ==0.5.0.2,
any.split ==0.2.3.5, any.split ==0.2.3.5,
any.splitmix ==0.1.0.4, any.splitmix ==0.1.0.4,
any.stm ==2.5.0.2, any.stm ==2.5.1.0,
any.stm-chans ==3.0.0.6, any.stm-chans ==3.0.0.6,
any.stm-containers ==1.2, any.stm-containers ==1.2,
any.stm-delay ==0.1.1.1, any.stm-delay ==0.1.1.1,
@ -332,14 +340,15 @@ constraints: any.Cabal ==3.6.3.0,
any.streaming-commons ==0.2.2.5, any.streaming-commons ==0.2.2.5,
any.strict ==0.4.0.1, any.strict ==0.4.0.1,
any.string-conversions ==0.4.0.1, any.string-conversions ==0.4.0.1,
any.string-interpolate ==0.3.1.2, any.string-interpolate ==0.3.2.0,
any.superbuffer ==0.3.1.2, any.superbuffer ==0.3.1.2,
any.syb ==0.7.2.2, any.syb ==0.7.2.2,
any.system-cxx-std-lib ==1.0,
any.system-filepath ==0.4.14, any.system-filepath ==0.4.14,
any.tagged ==0.8.6.1, any.tagged ==0.8.6.1,
any.tasty ==1.4.2.3, any.tasty ==1.4.2.3,
any.tasty-bench ==0.3.2, any.tasty-bench ==0.3.2,
any.template-haskell ==2.18.0.0, any.template-haskell ==2.19.0.0,
any.template-haskell-compat-v0208 ==0.1.9.1, any.template-haskell-compat-v0208 ==0.1.9.1,
any.temporary ==1.3, any.temporary ==1.3,
any.terminal-size ==0.3.3, any.terminal-size ==0.3.3,
@ -347,7 +356,7 @@ constraints: any.Cabal ==3.6.3.0,
any.test-framework ==0.8.2.0, any.test-framework ==0.8.2.0,
any.test-framework-hunit ==0.3.0.2, any.test-framework-hunit ==0.3.0.2,
any.testcontainers ==0.5.0.0, any.testcontainers ==0.5.0.0,
any.text ==1.2.5.0, any.text ==2.0.1,
any.text-builder ==0.6.7, any.text-builder ==0.6.7,
any.text-builder-dev ==0.3.3, any.text-builder-dev ==0.3.3,
any.text-conversions ==0.3.1.1, any.text-conversions ==0.3.1.1,
@ -366,7 +375,7 @@ constraints: any.Cabal ==3.6.3.0,
any.th-reify-many ==0.1.10, any.th-reify-many ==0.1.10,
any.these ==1.1.1.1, any.these ==1.1.1.1,
any.these-skinny ==0.7.5, any.these-skinny ==0.7.5,
any.time ==1.11.1.1, any.time ==1.12.2,
any.time-compat ==1.9.6.1, any.time-compat ==1.9.6.1,
any.time-locale-compat ==0.1.1.5, any.time-locale-compat ==0.1.1.5,
any.time-manager ==0.0.0, any.time-manager ==0.0.0,
@ -379,7 +388,7 @@ constraints: any.Cabal ==3.6.3.0,
any.typed-process ==0.2.10.1, any.typed-process ==0.2.10.1,
any.unagi-chan ==0.4.1.4, any.unagi-chan ==0.4.1.4,
any.unbounded-delays ==0.1.1.1, any.unbounded-delays ==0.1.1.1,
any.unix ==2.7.2.2, any.unix ==2.7.3,
any.unix-compat ==0.6, any.unix-compat ==0.6,
any.unix-time ==0.4.8, any.unix-time ==0.4.8,
any.unliftio ==0.2.23.0, any.unliftio ==0.2.23.0,
@ -407,21 +416,22 @@ constraints: any.Cabal ==3.6.3.0,
any.warp ==3.3.23, any.warp ==3.3.23,
any.wcwidth ==0.0.2, any.wcwidth ==0.0.2,
any.websockets ==0.12.7.3, any.websockets ==0.12.7.3,
any.wide-word ==0.1.1.2, any.wide-word ==0.1.5.0,
any.witch ==1.1.2.0, any.witch ==1.1.2.0,
any.witherable ==0.4.2, any.witherable ==0.4.2,
any.wl-pprint-annotated ==0.1.0.1, any.wl-pprint-annotated ==0.1.0.1,
any.word-wrap ==0.5, any.word-wrap ==0.5,
any.word8 ==0.1.3, any.word8 ==0.1.3,
any.wreq ==0.5.3.3, any.wreq ==0.5.3.3,
any.wuss ==2.0.1.3,
any.x509 ==1.7.7, any.x509 ==1.7.7,
any.x509-store ==1.6.9, any.x509-store ==1.6.9,
any.x509-system ==1.6.7, any.x509-system ==1.6.7,
any.x509-validation ==1.6.12, any.x509-validation ==1.6.12,
any.xml ==1.3.14, any.xml ==1.3.14,
any.xml-conduit ==1.9.1.1, any.xml-conduit ==1.9.1.2,
any.xml-lens ==0.3.1, any.xml-lens ==0.3.1,
any.xml-types ==0.3.8, any.xml-types ==0.3.8,
any.yaml ==0.11.8.0, any.yaml ==0.11.10.0,
any.zlib ==0.6.3.0, any.zlib ==0.6.3.0,
index-state: hackage.haskell.org 2023-03-27T13:56:15Z index-state: hackage.haskell.org 2023-04-26T15:43:24Z

View File

@ -10,12 +10,6 @@ package *
-j2 -j2
-- For performance, with `-j`: -- For performance, with `-j`:
+RTS -A64m -n2m -RTS +RTS -A64m -n2m -RTS
-- -----------------------------------------------------------
-- Allow for dead-code elimination at link-time, to reduce binary size
-- TODO WHEN WE NEXT UPGRADE GHC: move this into cabal.project
ghc-options: -split-sections
ld-options: -Wl,--gc-sections
-- -----------------------------------------------------------
-- Enable optimizations in all local (hasura) packages: -- Enable optimizations in all local (hasura) packages:
flags: +optimize-hasura flags: +optimize-hasura

View File

@ -5,7 +5,7 @@
import: cabal/dev-sh-optimized.project.local import: cabal/dev-sh-optimized.project.local
-- Apply to all local packages (consider an additional mode that recompiles libraries too)
program-options program-options
ghc-options: -ticky ghc-options: -ticky
-- TODO doesn't seem to work with -ticky??:
ghc-options: -ddump-stg-final -ddump-to-file ghc-options: -ddump-stg-final -ddump-to-file

View File

@ -1,5 +1,5 @@
-- THIS IS JUST A PLACEHOLDER FOR NOW. WE CAN ENABLE THIS AND INTEGRATE IT -- Enable lower distortion time profiling, powers `dev.sh graphql-engine --prof-time`
-- INTO DEV.SH AFTER MOVING TO GHC 9.4 -- See: https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html?highlight=fprof%20late#ghc-flag--fprof-late
import: cabal/dev-sh-optimized.project.local import: cabal/dev-sh-optimized.project.local
profiling: True profiling: True
@ -7,7 +7,10 @@ flags: +profiling
package * package *
profiling-detail: none profiling-detail: none
ghc-options: -fprof-late
-- For each module, STG will be dumped to: -- For each module, STG will be dumped to:
-- dist-newstyle/**/*.dump-stg-final -- dist-newstyle/**/*.dump-stg-final
ghc-options: -ddump-stg-final -ddump-to-file ghc-options: -ddump-stg-final -ddump-to-file
-- Apply late cost centers only to the local (hasura) packages, to minimize distortion:
program-options
ghc-options: -fprof-late
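
For reference, a minimal standalone sketch (not HGE code) of the kind of manual annotation that -fprof-late makes largely unnecessary: the flag inserts cost centres automatically after the optimiser has run, so the resulting .prof distorts the optimised code less than hand-placed or early automatic cost centres.

-- Minimal sketch, not HGE code: a hand-written cost centre of the sort that
-- -fprof-late mostly replaces. Compile with -prof, run with +RTS -p -RTS, and
-- look for "expensive_sum" in the generated .prof file.
module Main (main) where

main :: IO ()
main = print ({-# SCC "expensive_sum" #-} sum [1 .. 1000000 :: Int])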

View File

@ -32,9 +32,9 @@ func TestProjectMetadataOps_Apply(t *testing.T) {
v3Expected := v3Expected :=
`{"is_consistent":false,"inconsistent_objects":[ `{"is_consistent":false,"inconsistent_objects":[
{"definition":{"name":"t4","schema":"pub"},"name":"table pub.t4 in source default","reason":"Inconsistent object: no such table/view exists in source: \"pub.t4\"","type":"table"}, {"definition":{"name":"t4","schema":"pub"},"name":"table pub.t4 in source default","reason":"Inconsistent object: no such table/view exists in source: \"pub.t4\"","type":"table"},
{"definition":{"name":"t2","schema":"public"},"name":"table t2 in source default","reason":"Inconsistent object: no such table/view exists in source: \"t2\"","type":"table"},
{"definition":{"name":"t3","schema":"pub"},"name":"table pub.t3 in source default","reason":"Inconsistent object: no such table/view exists in source: \"pub.t3\"","type":"table"}, {"definition":{"name":"t3","schema":"pub"},"name":"table pub.t3 in source default","reason":"Inconsistent object: no such table/view exists in source: \"pub.t3\"","type":"table"},
{"definition":{"name":"t1","schema":"public"},"name":"table t1 in source default","reason":"Inconsistent object: no such table/view exists in source: \"t1\"","type":"table"} {"definition":{"name":"t1","schema":"public"},"name":"table t1 in source default","reason":"Inconsistent object: no such table/view exists in source: \"t1\"","type":"table"},
{"definition":{"name":"t2","schema":"public"},"name":"table t2 in source default","reason":"Inconsistent object: no such table/view exists in source: \"t2\"","type":"table"}
]}` ]}`
tests := []struct { tests := []struct {
name string name string

View File

@ -10,15 +10,6 @@
"name": "table pub.t4 in source default", "name": "table pub.t4 in source default",
"type": "table" "type": "table"
}, },
{
"definition": {
"schema": "public",
"name": "t2"
},
"reason": "Inconsistent object: no such table/view exists in source: \"t2\"",
"name": "table t2 in source default",
"type": "table"
},
{ {
"definition": { "definition": {
"schema": "pub", "schema": "pub",
@ -36,6 +27,15 @@
"reason": "Inconsistent object: no such table/view exists in source: \"t1\"", "reason": "Inconsistent object: no such table/view exists in source: \"t1\"",
"name": "table t1 in source default", "name": "table t1 in source default",
"type": "table" "type": "table"
},
{
"definition": {
"schema": "public",
"name": "t2"
},
"reason": "Inconsistent object: no such table/view exists in source: \"t2\"",
"name": "table t2 in source default",
"type": "table"
} }
] ]
} }

View File

@ -5,11 +5,11 @@
"systems": "systems" "systems": "systems"
}, },
"locked": { "locked": {
"lastModified": 1681202837, "lastModified": 1685518550,
"narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=", "narHash": "sha256-o2d0KcvaXzTrPRIo0kOLV0/QXHhDQ5DTi+OxcjO8xqY=",
"owner": "numtide", "owner": "numtide",
"repo": "flake-utils", "repo": "flake-utils",
"rev": "cfacdce06f30d2b68473a46042957675eebb3401", "rev": "a1720a10a6cfe8234c0e93907ffe81be440f4cef",
"type": "github" "type": "github"
}, },
"original": { "original": {
@ -20,11 +20,11 @@
}, },
"nixpkgs": { "nixpkgs": {
"locked": { "locked": {
"lastModified": 1682449794, "lastModified": 1685534442,
"narHash": "sha256-Ri9mSJ8ykAY4KS/hxVZwW3IhqFmowLubTJ1lcKiInTU=", "narHash": "sha256-NoxAjHiGsmnCeIY3SlMRA3DT/n+cACm+TPtaFR6hD+M=",
"owner": "NixOS", "owner": "NixOS",
"repo": "nixpkgs", "repo": "nixpkgs",
"rev": "a7b0ff0a0de9d5576fe15fa9083c933cfd430902", "rev": "d4438bded890ecd5179cf366839b26ea0b505a87",
"type": "github" "type": "github"
}, },
"original": { "original": {

View File

@ -5,6 +5,9 @@ import nixpkgs {
inherit system; inherit system;
config = { config = {
allowUnfree = true; allowUnfree = true;
permittedInsecurePackages = [
"nodejs-16.20.0" # until we upgrade our node.js version in .nvmrc
];
}; };
overlays = [ overlays = [
(import ./overlays/ghc.nix) (import ./overlays/ghc.nix)

View File

@ -1,11 +1,11 @@
final: prev: { final: prev: {
haskell = prev.haskell // { haskell = prev.haskell // {
packages = prev.haskell.packages // { packages = prev.haskell.packages // {
ghc925 = prev.haskell.packages."${prev.ghcName}".override (old: { ${prev.ghcName} = prev.haskell.packages.${prev.ghcName}.override (old: {
overrides = prev.lib.composeExtensions overrides = prev.lib.composeExtensions
(old.overrides or (_: _: { })) (old.overrides or (_: _: { }))
(hfinal: hprev: { (hfinal: hprev: {
aeson-ordered = final.haskell.packages."${prev.ghcName}".callCabal2nix "aeson-ordered" ../../server/lib/aeson-ordered { }; aeson-ordered = final.haskell.packages.${prev.ghcName}.callCabal2nix "aeson-ordered" ../../server/lib/aeson-ordered { };
}); });
}); });
}; };

View File

@ -1,12 +1,12 @@
final: prev: { final: prev: {
haskell = prev.haskell // { haskell = prev.haskell // {
packages = prev.haskell.packages // { packages = prev.haskell.packages // {
ghc925 = prev.haskell.packages."${prev.ghcName}".override (old: { ${prev.ghcName} = prev.haskell.packages.${prev.ghcName}.override (old: {
overrides = prev.lib.composeExtensions overrides = prev.lib.composeExtensions
(old.overrides or (_: _: { })) (old.overrides or (_: _: { }))
(hfinal: hprev: { (hfinal: hprev: {
# Tests don't compile as extra-source-files are missing # Tests don't compile as extra-source-files are missing
dc-api = prev.haskell.lib.dontCheck (final.haskell.packages."${prev.ghcName}".callCabal2nix "dc-api" ../../server/lib/dc-api { }); dc-api = prev.haskell.lib.dontCheck (final.haskell.packages.${prev.ghcName}.callCabal2nix "dc-api" ../../server/lib/dc-api { });
}); });
}); });
}; };

View File

@ -0,0 +1,10 @@
--- a/compiler/GHC/Linker/Loader.hs
+++ b/compiler/GHC/Linker/Loader.hs
@@ -1616,8 +1616,6 @@
| verbatim = [lib]
| otherwise = [ "lib" ++ lib ++ lib_tag <.> "a"
, lib <.> "a" -- native code has no lib_tag
- , "lib" ++ lib
- , lib
]
lib_tag = if is_hs && loading_profiled_hs_libs then "_p" else ""

View File

@ -2,21 +2,17 @@ self: super:
let let
versions = import ../versions.nix { pkgs = super; }; versions = import ../versions.nix { pkgs = super; };
ghcVersion = "9.2.5"; ghcName = "ghc${builtins.replaceStrings ["."] [""] versions.ghcVersion}";
ghcName = "ghc${builtins.replaceStrings ["."] [""] ghcVersion}";
ghcPatches = ghcPatches =
if super.stdenv.targetPlatform.isDarwin if super.stdenv.targetPlatform.isDarwin
then [ then [
# Copied from https://github.com/NixOS/nixpkgs/pull/149942 # Copied from https://github.com/NixOS/nixpkgs/pull/149942
# If the GHC version is updated, we must update the patch URL too. # If the GHC version is updated, we must update the patch too.
# --- # ---
# Reverts the linking behavior of GHC to not resolve `-libc++` to `c++`. # Reverts the linking behavior of GHC to not resolve `-libc++` to `c++`.
# Without this, we get the following error on macOS: # Without this, we get the following error on macOS:
# ghc: loadArchive: Neither an archive, nor a fat archive: `/path/to/clang++' # ghc: loadArchive: Neither an archive, nor a fat archive: `/path/to/clang++'
(super.fetchpatch { ./ghc-9.4-macOS-loadArchive-fix.patch
url = "https://raw.githubusercontent.com/input-output-hk/haskell.nix/4b6ee9767daaf5fc1d5419e07733ab006b95ec93/overlays/patches/ghc/ghc-9.2-macOS-loadArchive-fix.patch";
sha256 = "qOfIOqI5oM9695iWtK//OoP7NeF9K6ykGsdSOWJQN/I=";
})
] else [ ]; ] else [ ];
in in
{ {
@ -29,25 +25,14 @@ in
packages = super.haskell.packages // { packages = super.haskell.packages // {
${ghcName} = super.haskell.packages.${ghcName}.override { ${ghcName} = super.haskell.packages.${ghcName}.override {
overrides = hself: hsuper: overrides = hself: hsuper: {
{ # ghcid tests are broken on GHC 9.4
# This is due to a GHC 9.2.5 regression. ghcid = super.haskell.lib.dontCheck hsuper.ghcid;
# see https://gitlab.haskell.org/ghc/ghc/-/issues/22425 };
ListLike = super.haskell.lib.dontCheck hsuper.ListLike;
} //
(if super.stdenv.targetPlatform.isDarwin
then
# macOS-specific overrides:
{
# On aarch64-darwin, this creates a cycle for some reason; didn't look too much into it.
ghcid = super.haskell.lib.overrideCabal hsuper.ghcid (drv: { enableSeparateBinOutput = false; });
}
else
# We don't need to override anything on Linux:
{ });
}; };
}; };
}; };
inherit ghcVersion ghcName; inherit (versions) ghcVersion;
inherit ghcName;
} }

View File

@ -1,11 +1,11 @@
final: prev: { final: prev: {
haskell = prev.haskell // { haskell = prev.haskell // {
packages = prev.haskell.packages // { packages = prev.haskell.packages // {
ghc925 = prev.haskell.packages.ghc925.override (old: { ${prev.ghcName} = prev.haskell.packages.${prev.ghcName}.override (old: {
overrides = prev.lib.composeExtensions overrides = prev.lib.composeExtensions
(old.overrides or (_: _: { })) (old.overrides or (_: _: { }))
(hfinal: hprev: { (hfinal: hprev: {
graphql-parser = (final.haskell.packages.ghc925.callCabal2nix "graphql-parser" ../../server/lib/graphql-parser-hs { }).overrideScope ( graphql-parser = (final.haskell.packages.${prev.ghcName}.callCabal2nix "graphql-parser" ../../server/lib/graphql-parser-hs { }).overrideScope (
final: prev: { final: prev: {
hedgehog = final.hedgehog_1_2; hedgehog = final.hedgehog_1_2;
} }

View File

@ -1,34 +1,20 @@
# Upgrade Ormolu to v0.7.0.0. # Fix Ormolu v0.7.0.0.
# #
# We start with v0.6.0.1, upgrade its `src`, and override dependencies to make # The wrong versions of certain packages are supplied. This overrides them with
# the version constraints happy. # the correct versions.
#
# We build it with GHC 9.4 because it bundles text v2, which is required for
# Ormolu v0.5.3 and up. This means we can't just override the Hackage package;
# instead, we override the root-level package and use that directly.
#
# Because of this, HLS will use the wrong Ormolu version. We can resolve this
# once we upgrade to GHC 9.4 *and* a version of hls-ormolu-plugin is released
# which supports this version of Ormolu (at the time of writing, only v0.5.x
# and older are supported).
self: super: self: super:
let {
overridden = super.haskell.packages.ghc94.override { haskell = super.haskell // {
overrides = hself: hsuper: { packages = super.haskell.packages // {
ormolu = (super.haskell.lib.overrideSrc hsuper.ormolu_0_6_0_1 rec { ${self.ghcName} = super.haskell.packages.${self.ghcName}.override {
version = "0.7.0.0"; overrides = hself: hsuper: {
src = super.fetchurl { ormolu_0_7_0_0 = hsuper.ormolu_0_7_0_0.override {
url = "mirror://hackage/ormolu-${version}.tar.gz"; Cabal-syntax = hsuper.Cabal-syntax_3_10_1_0;
sha256 = "07bwcki2xp2g5q3jpll8675yawr1x6nk3zg1yns8mdw085a98g7s"; ghc-lib-parser = hsuper.ghc-lib-parser_9_6_2_20230523;
};
}; };
}).override {
"Cabal-syntax" = hsuper."Cabal-syntax_3_10_1_0";
"ghc-lib-parser" = hsuper."ghc-lib-parser_9_6_1_20230312";
}; };
}; };
}; };
in
{
ormolu = overridden.ormolu;
} }

View File

@ -1,11 +1,11 @@
final: prev: { final: prev: {
haskell = prev.haskell // { haskell = prev.haskell // {
packages = prev.haskell.packages // { packages = prev.haskell.packages // {
ghc925 = prev.haskell.packages."${prev.ghcName}".override (old: { ${prev.ghcName} = prev.haskell.packages.${prev.ghcName}.override (old: {
overrides = prev.lib.composeExtensions overrides = prev.lib.composeExtensions
(old.overrides or (_: _: { })) (old.overrides or (_: _: { }))
(hfinal: hprev: { (hfinal: hprev: {
pg-client = prev.haskell.lib.dontCheck (final.haskell.packages."${prev.ghcName}".callCabal2nix "pg-client" ../../server/lib/pg-client-hs { }); pg-client = prev.haskell.lib.dontCheck (final.haskell.packages.${prev.ghcName}.callCabal2nix "pg-client" ../../server/lib/pg-client-hs { });
}); });
}); });
}; };

View File

@ -1,11 +1,11 @@
final: prev: { final: prev: {
haskell = prev.haskell // { haskell = prev.haskell // {
packages = prev.haskell.packages // { packages = prev.haskell.packages // {
ghc925 = prev.haskell.packages."${prev.ghcName}".override (old: { ${prev.ghcName} = prev.haskell.packages.${prev.ghcName}.override (old: {
overrides = prev.lib.composeExtensions overrides = prev.lib.composeExtensions
(old.overrides or (_: _: { })) (old.overrides or (_: _: { }))
(hfinal: hprev: { (hfinal: hprev: {
resource-pool = final.haskell.packages."${prev.ghcName}".callCabal2nix "resource-pool" ../../server/lib/pool { }; resource-pool = final.haskell.packages.${prev.ghcName}.callCabal2nix "resource-pool" ../../server/lib/pool { };
}); });
}); });
}; };

View File

@ -45,9 +45,13 @@ let
buildInputs = [ original pkgs.makeWrapper ]; buildInputs = [ original pkgs.makeWrapper ];
installPhase = '' installPhase = ''
mkdir -p "$out/bin" mkdir -p "$out/bin"
makeWrapper ${original}/bin/ghc "$out/bin/ghc" \ for bin in ${original}/bin/*; do
--set LD_LIBRARY_PATH ${pkgs.lib.strings.makeLibraryPath dynamicLibraries} \ if [[ -x "$bin" ]]; then
--set DYLD_LIBRARY_PATH ${pkgs.lib.strings.makeLibraryPath dynamicLibraries} makeWrapper "$bin" "$out/bin/$(basename "$bin")" \
--set LD_LIBRARY_PATH ${pkgs.lib.strings.makeLibraryPath dynamicLibraries} \
--set DYLD_LIBRARY_PATH ${pkgs.lib.strings.makeLibraryPath dynamicLibraries}
fi
done
''; '';
}; };
@ -93,20 +97,18 @@ let
haskellInputs = [ haskellInputs = [
pkgs.cabal2nix pkgs.cabal2nix
# Ormolu is special; it's provided by our overlay.
pkgs.ormolu
ghc ghc
hls hls
pkgs.haskell.packages.${pkgs.ghcName}.alex pkgs.haskell.packages.${pkgs.ghcName}.alex
pkgs.haskell.packages.${pkgs.ghcName}.apply-refact # pkgs.haskell.packages.${pkgs.ghcName}.apply-refact
(versions.ensureVersion pkgs.haskell.packages.${pkgs.ghcName}.cabal-install) (versions.ensureVersion pkgs.haskell.packages.${pkgs.ghcName}.cabal-install)
pkgs.haskell.packages.${pkgs.ghcName}.ghcid # pkgs.haskell.packages.${pkgs.ghcName}.ghcid
pkgs.haskell.packages.${pkgs.ghcName}.happy pkgs.haskell.packages.${pkgs.ghcName}.happy
(versions.ensureVersion pkgs.haskell.packages.${pkgs.ghcName}.hlint) (versions.ensureVersion pkgs.haskell.packages.${pkgs.ghcName}.hlint)
pkgs.haskell.packages.${pkgs.ghcName}.hoogle pkgs.haskell.packages.${pkgs.ghcName}.hoogle
pkgs.haskell.packages.${pkgs.ghcName}.hspec-discover pkgs.haskell.packages.${pkgs.ghcName}.hspec-discover
(versions.ensureVersion pkgs.haskell.packages.${pkgs.ghcName}.ormolu_0_7_0_0)
]; ];
devInputs = [ devInputs = [

View File

@ -10,5 +10,7 @@ in
then package then package
else throw "Invalid version for package ${package.pname}: expected ${expected}, got ${package.version}"; else throw "Invalid version for package ${package.pname}: expected ${expected}, got ${package.version}";
ghcVersion = pkgs.lib.strings.fileContents ../.ghcversion;
nodejsVersion = pkgs.lib.strings.fileContents ../.nvmrc; nodejsVersion = pkgs.lib.strings.fileContents ../.nvmrc;
} }

View File

@ -47,7 +47,7 @@ Available COMMANDs:
--prof-ticky : "Ticky ticky" profiling for accounting of allocations (see: cabal/README.md) --prof-ticky : "Ticky ticky" profiling for accounting of allocations (see: cabal/README.md)
--prof-heap-infomap : Heap profiling (see: cabal/README.md) --prof-heap-infomap : Heap profiling (see: cabal/README.md)
--prof-ghc-debug : Enable ghc-debug (see: cabal/README.md) --prof-ghc-debug : Enable ghc-debug (see: cabal/README.md)
--prof-time : NOT YET IMPLEMENTED (TODO After 9.4) (see: cabal/README.md) --prof-time : Time profiling (see: cabal/README.md)
postgres postgres
Launch a postgres container suitable for use with graphql-engine, watch its Launch a postgres container suitable for use with graphql-engine, watch its
@ -124,11 +124,13 @@ case "${1-}" in
;; ;;
--prof-ticky) --prof-ticky)
echo_warn "This will delete any '$EDITION_NAME.ticky' and perform significant recompilation. Ok?" if [ -f "$EDITION_NAME.ticky" ]; then
echo_warn "Press enter to continue [will proceed in 10s]" echo_error "The file '$EDITION_NAME.ticky' exists and we would clobber it. Please delete or rename it and try again."
exit 1
fi
echo_warn "This will perform significant recompilation. Ok?"
echo_warn " Press enter to continue [will proceed in 10s]"
read -r -t10 || true read -r -t10 || true
# Avoid confusion:
rm -f "$EDITION_NAME.ticky"
CABAL_PROJECT_FILE=cabal/dev-sh-prof-ticky.project CABAL_PROJECT_FILE=cabal/dev-sh-prof-ticky.project
HASURA_PROF_MODE=ticky HASURA_PROF_MODE=ticky
GRAPHQL_ENGINE_EXTRA_ARGS+=( +RTS -r -RTS ) GRAPHQL_ENGINE_EXTRA_ARGS+=( +RTS -r -RTS )
@ -177,7 +179,6 @@ case "${1-}" in
;; ;;
--prof-time) --prof-time)
die_usage # NOT YET IMPLEMENTED
echo_warn "This will delete any $EDITION_NAME.prof and perform significant recompilation." echo_warn "This will delete any $EDITION_NAME.prof and perform significant recompilation."
echo_warn "Press enter to continue [will proceed in 10s]" echo_warn "Press enter to continue [will proceed in 10s]"
read -r -t10 || true read -r -t10 || true
@ -186,6 +187,10 @@ case "${1-}" in
CABAL_PROJECT_FILE=cabal/dev-sh-prof-time.project CABAL_PROJECT_FILE=cabal/dev-sh-prof-time.project
HASURA_PROF_MODE="time" HASURA_PROF_MODE="time"
GRAPHQL_ENGINE_EXTRA_ARGS+=( +RTS -P -RTS ) GRAPHQL_ENGINE_EXTRA_ARGS+=( +RTS -P -RTS )
# TODO alternatively we can do `-pj` and use speedscope (unfortunately we
# can't get both formats of output), but I think profiterole is more
# useful
# GRAPHQL_ENGINE_EXTRA_ARGS+=( +RTS -pj -RTS )
case "${3-}" in case "${3-}" in
--) --)
GRAPHQL_ENGINE_EXTRA_ARGS+=( "${@:4}" ) GRAPHQL_ENGINE_EXTRA_ARGS+=( "${@:4}" )
@ -386,6 +391,12 @@ function start_dbs() {
################################# #################################
if [ "$MODE" = "graphql-engine" ] || [ "$MODE" = "graphql-engine-pro" ]; then if [ "$MODE" = "graphql-engine" ] || [ "$MODE" = "graphql-engine-pro" ]; then
  # Set the file descriptor limit up to the hard limit. The common default of
  # 1024 is too low to properly test subscriptions, for instance.
# It might be best just to do this in the engines:
# https://hackage.haskell.org/package/unix-2.8.1.1/docs/System-Posix-Resource.html
ulimit -Sn unlimited
cd "$PROJECT_ROOT" cd "$PROJECT_ROOT"
# Existing tix files for a different hge binary will cause issues: # Existing tix files for a different hge binary will cause issues:
rm -f "$EDITION_NAME.tix" rm -f "$EDITION_NAME.tix"
@ -396,12 +407,65 @@ if [ "$MODE" = "graphql-engine" ] || [ "$MODE" = "graphql-engine-pro" ]; then
### Run analysis or visualization tools, if we ran in one of the profiling modes ### Run analysis or visualization tools, if we ran in one of the profiling modes
case "${HASURA_PROF_MODE-}" in case "${HASURA_PROF_MODE-}" in
ticky) ticky)
echo_warn "Done. View the ticky report at: $EDITION_NAME.ticky" TICKY_FILENAME=$("$PROJECT_ROOT"/scripts/get-version.sh)-$(date +%s).ticky
echo_warn "See: https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#using-ticky-ticky-profiling-for-implementors" if [ -f "$EDITION_NAME.ticky" ]; then
echo_warn "Lookup referenced STG names dumped to their respective module files: dist-newstyle/**/*.dump-stg-final" # Sort the main part of the profile by allocations and reassemble:
# TODO some analysis utilities: TICKY_TEMPD=$(mktemp -d)
# - sort by top awk -v TICKY_TEMPD="$TICKY_TEMPD" \
# - find dictionaries ("+" args) '/-------------[-]+$|\*\*\*\*\*[*]+$/{n++}{print >TICKY_TEMPD "/" "x" n }' \
"$EDITION_NAME.ticky"
ticky_tmp=$(mktemp hasura_devsh_ticky.tmp.XXXXXXX)
{
cat "$TICKY_TEMPD/x" "$TICKY_TEMPD/x1" ;
head -n1 "$TICKY_TEMPD/x2" ;
# This is the main section we care about, with allocation counts by name:
tail -n +2 "$TICKY_TEMPD/x2" | sort -k2 -r -n | tee "$ticky_tmp";
cat "$TICKY_TEMPD/x3"
} >> "$TICKY_FILENAME"
# Make sure we didn't screw anything up, e.g. if ticky format changes:
TICKY_FILENAME_sz=$(wc -c <"$TICKY_FILENAME")
wc_c=$(wc -c <"$EDITION_NAME.ticky")
if [ "$TICKY_FILENAME_sz" -ne "$wc_c" ]; then
echo_error "Erm... seems our processing of ticky file has a bug. Please fix me"
fi
rm -r "$TICKY_TEMPD"
echo_warn "Done. View the ticky report at: $TICKY_FILENAME"
echo_warn "See: https://downloads.haskell.org/ghc/latest/docs/users_guide/profiling.html#using-ticky-ticky-profiling-for-implementors"
echo_warn "Lookup referenced STG names dumped to their respective module files: dist-newstyle/**/*.dump-stg-final"
### Do some additional analysis:
# Extract module names, along with allocation counts
ticky_tmp2=$(mktemp hasura_devsh_ticky2.tmp.XXXXXXX)
if command -v rg >/dev/null ; then
rg -o ' +[0-9]+ +([0-9]+).*( | \()([A-Z][a-zA-Z]*(\.[A-Z][A-Za-z]*)*)' -r '$1 $3' \
"$ticky_tmp" > "$ticky_tmp2"
awk '{sum[$2]+=$1} END {for (val in sum) printf "%'"'"'20d\t%s\n", sum[val], val }' "$ticky_tmp2" \
| sort -nr -k1 \
> "$TICKY_FILENAME.modules"
echo
echo_warn "Here are the top modules by allocation (see $TICKY_FILENAME.modules for all):"
head -n5 "$TICKY_FILENAME.modules"
echo
else
echo_error "Please install ripgrep (rg) to get per-module allocation summary"
fi
# NOTE: this should equal the sum of allocations over all entries in the
# list; for e.g. a benchmark workload it agrees to within ~1%, but it's
# not clear why it doesn't match exactly:
instrumented_bytes_allocated=$(grep ALLOC_HEAP_tot "$EDITION_NAME.ticky" | awk '{print $1}')
echo_warn "There were..."
printf "%'20d\n" "$instrumented_bytes_allocated"
echo_warn "...bytes allocated from instrumented code in the profile."
echo_warn "Compare this to the \"bytes allocated in the heap\" reported from the"
echo_warn "'+RTS -s' above to see how many allocations aren't visible due to dependencies"
echo_warn "not being instrumented (TODO --prof-ticky-all mode, maybe)"
rm "$ticky_tmp" "$ticky_tmp2" "$EDITION_NAME.ticky"
else
echo_error "Hmmm. $EDITION_NAME.ticky wasn't generated for some reason..."
fi
;; ;;
heap-infomap) heap-infomap)
if command -v eventlog2html >/dev/null ; then if command -v eventlog2html >/dev/null ; then
@ -418,14 +482,18 @@ if [ "$MODE" = "graphql-engine" ] || [ "$MODE" = "graphql-engine-pro" ]; then
;; ;;
time) time)
if command -v profiterole >/dev/null ; then if command -v profiterole >/dev/null ; then
echo_warn "Running profiterole..." if [ -f "$EDITION_NAME.prof" ]; then
profiterole "$EDITION_NAME.prof" echo_warn "Running profiterole..."
echo_warn "Done. Check out..." profiterole "$EDITION_NAME.prof"
echo_warn " - $EDITION_NAME.prof ...for the top-down report" echo_warn "Done. Check out..."
echo_warn " - $EDITION_NAME.profiterole.html ...for the top-down report" echo_warn " - $EDITION_NAME.prof ...for the top-down report"
echo_warn "Lookup referenced STG names dumped to their respective module files: dist-newstyle/**/*.dump-stg-final" echo_warn " - $EDITION_NAME.profiterole.html ...for the folded report"
echo_warn "Lookup referenced STG names dumped to their respective module files: dist-newstyle/**/*.dump-stg-final"
else
echo_error "No $EDITION_NAME.prof was created... :("
fi
else else
echo_warn "Please install profiterole" echo_warn "You may wish to install profiterole"
fi fi
;; ;;
"") "")
@ -475,6 +543,8 @@ if [ "$MODE" = "graphql-engine" ] || [ "$MODE" = "graphql-engine-pro" ]; then
export HASURA_GRAPHQL_DATABASE_URL=${HASURA_GRAPHQL_DATABASE_URL-$PG_DB_URL} export HASURA_GRAPHQL_DATABASE_URL=${HASURA_GRAPHQL_DATABASE_URL-$PG_DB_URL}
export HASURA_GRAPHQL_SERVER_PORT=${HASURA_GRAPHQL_SERVER_PORT-8181} export HASURA_GRAPHQL_SERVER_PORT=${HASURA_GRAPHQL_SERVER_PORT-8181}
# Add 'developer' to the default list, for more visibility:
export HASURA_GRAPHQL_ENABLED_APIS=metadata,graphql,pgdump,config,developer
echo_pretty "We will connect to postgres at '$HASURA_GRAPHQL_DATABASE_URL'" echo_pretty "We will connect to postgres at '$HASURA_GRAPHQL_DATABASE_URL'"
echo_pretty "If you haven't overridden HASURA_GRAPHQL_DATABASE_URL, you can" echo_pretty "If you haven't overridden HASURA_GRAPHQL_DATABASE_URL, you can"
@ -521,7 +591,7 @@ if [ "$MODE" = "graphql-engine" ] || [ "$MODE" = "graphql-engine-pro" ]; then
echo_pretty " $ yarn install && yarn server-build:$EDITION_ABBREV" echo_pretty " $ yarn install && yarn server-build:$EDITION_ABBREV"
echo_pretty "" echo_pretty ""
echo_pretty "Useful endpoints when compiling with '$EDITION_NAME:developer' and running with '+RTS -T'" echo_pretty "Useful endpoints when compiling with '$EDITION_NAME:developer' and running with '+RTS -T'"
echo_pretty " http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT/dev/subscriptions" echo_pretty " http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT/dev/subscriptions/extended"
echo_pretty " http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT/dev/plan_cache" echo_pretty " http://127.0.0.1:$HASURA_GRAPHQL_SERVER_PORT/dev/plan_cache"
echo_pretty "" echo_pretty ""
echo_pretty "To view realtime GC stats and other info open in your browser:" echo_pretty "To view realtime GC stats and other info open in your browser:"

View File

@ -1,6 +1,6 @@
{ {
"cabal-install": "3.10.1.0", "cabal-install": "3.10.1.0",
"ghc": "9.2.5", "ghc": "9.4.5",
"hlint": "3.4.1", "hlint": "3.5",
"ormolu": "0.7.0.0" "ormolu": "0.7.0.0"
} }

View File

@ -1,5 +1,6 @@
{-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
{-# OPTIONS_GHC -Wno-error=deprecations #-}
-- | Regression tests for issue 8345. -- | Regression tests for issue 8345.
module Test.Regression.NullRemoteRelationship8345Spec (spec) where module Test.Regression.NullRemoteRelationship8345Spec (spec) where

View File

@ -546,8 +546,6 @@ joinArticleTests = describe "join from article object" do
[yaml| [yaml|
data: data:
object: object:
# to circumvent https://github.com/morpheusgraphql/morpheus-graphql/issues/687
__typename: Article
title: "Article1" title: "Article1"
|] |]
shouldReturnYaml shouldReturnYaml
@ -584,8 +582,6 @@ joinWriterTests = describe "join from writer object" do
[yaml| [yaml|
data: data:
object: object:
# to circumvent https://github.com/morpheusgraphql/morpheus-graphql/issues/687
__typename: Writer
name: "Writer1" name: "Writer1"
articles: articles:
- title: Article1 - title: Article1
@ -625,8 +621,6 @@ joinArtistTests = describe "join from artist object" do
[yaml| [yaml|
data: data:
object: object:
# to circumvent https://github.com/morpheusgraphql/morpheus-graphql/issues/687
__typename: Artist
name: "Artist1" name: "Artist1"
articles: articles:
- title: Article1 - title: Article1
@ -690,8 +684,6 @@ deeplyNestedJoinTests = describe "join from artist object" do
baz: baz:
- title: Article2 - title: Article2
- title: Article4 - title: Article4
# to circumvent https://github.com/morpheusgraphql/morpheus-graphql/issues/687
__typename: Writer
- local_articles: - local_articles:
- title: Article1 - title: Article1
foo: foo:
@ -709,8 +701,6 @@ deeplyNestedJoinTests = describe "join from artist object" do
baz: baz:
- title: Article3 - title: Article3
- title: Article4 - title: Article4
# to circumvent https://github.com/morpheusgraphql/morpheus-graphql/issues/687
__typename: Artist
|] |]
shouldReturnYaml shouldReturnYaml
testEnvironment testEnvironment
@ -743,14 +733,10 @@ deeplyNestedJoinTests = describe "join from artist object" do
- bar: - bar:
- title: Article3 - title: Article3
- title: Article4 - title: Article4
# to circumvent https://github.com/morpheusgraphql/morpheus-graphql/issues/687
__typename: Writer
- bar: - bar:
baz: baz:
- title: Article2 - title: Article2
- title: Article4 - title: Article4
# to circumvent https://github.com/morpheusgraphql/morpheus-graphql/issues/687
__typename: Artist
|] |]
shouldReturnYaml shouldReturnYaml
testEnvironment testEnvironment

View File

@ -1,5 +1,6 @@
{-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
{-# OPTIONS_GHC -Wno-error=deprecations #-}
-- | This file contains all the contexts for setting up remote relationships between -- | This file contains all the contexts for setting up remote relationships between
-- different kinds of sources. -- different kinds of sources.

View File

@ -1,5 +1,6 @@
{-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
{-# OPTIONS_GHC -Wno-error=deprecations #-}
-- | Tests for array remote relationships to databases. Remote relationships are -- | Tests for array remote relationships to databases. Remote relationships are
-- relationships that are not local to a given source or remote schema, and are -- relationships that are not local to a given source or remote schema, and are

View File

@ -1,5 +1,6 @@
{-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
{-# OPTIONS_GHC -Wno-error=deprecations #-}
-- | Tests for object remote relationships to databases. Remote relationships -- | Tests for object remote relationships to databases. Remote relationships
-- are relationships that are not local to a given source or remote schema, and -- are relationships that are not local to a given source or remote schema, and

View File

@ -1,5 +1,6 @@
{-# LANGUAGE DuplicateRecordFields #-} {-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE QuasiQuotes #-} {-# LANGUAGE QuasiQuotes #-}
{-# OPTIONS_GHC -Wno-error=deprecations #-}
-- | Tests for remote relationships to remote schemas. Remote relationships are -- | Tests for remote relationships to remote schemas. Remote relationships are
-- relationships that are not local to a given source or remote schema, and are -- relationships that are not local to a given source or remote schema, and are

View File

@ -61,7 +61,6 @@ import Control.Arrow.Extended
import Control.Lens (makeLensesFor, makePrisms) import Control.Lens (makeLensesFor, makePrisms)
import Data.Aeson import Data.Aeson
import Data.Aeson.Encoding qualified as J import Data.Aeson.Encoding qualified as J
import Data.Aeson.Internal
import Data.Aeson.Key qualified as K import Data.Aeson.Key qualified as K
import Data.Aeson.Types import Data.Aeson.Types
import Data.Parser.JSONPath (encodeJSONPath) import Data.Parser.JSONPath (encodeJSONPath)

View File

@ -115,10 +115,6 @@ instance J.FromJSON C.CronSchedule where
instance J.ToJSON C.CronSchedule where instance J.ToJSON C.CronSchedule where
toJSON = J.String . C.serializeCronSchedule toJSON = J.String . C.serializeCronSchedule
instance J.FromJSONKey Void
instance J.ToJSONKey Void
instance J.FromJSON ByteString where instance J.FromJSON ByteString where
parseJSON = J.withText "ByteString" (pure . encodeUtf8) parseJSON = J.withText "ByteString" (pure . encodeUtf8)

View File

@ -7,8 +7,8 @@ where
import Control.Applicative import Control.Applicative
import Data.Aeson (Key) import Data.Aeson (Key)
import Data.Aeson qualified as J import Data.Aeson qualified as J
import Data.Aeson.Internal (JSONPath, JSONPathElement (..))
import Data.Aeson.Key qualified as K import Data.Aeson.Key qualified as K
import Data.Aeson.Types (JSONPath, JSONPathElement (..))
import Data.Attoparsec.Text import Data.Attoparsec.Text
import Data.Bifunctor qualified as Bifunctor import Data.Bifunctor qualified as Bifunctor
import Data.Text qualified as T import Data.Text qualified as T
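
The import churn in this and the following Haskell modules comes from aeson 2.1 deprecating Data.Aeson.Internal; the same names (JSONPath, JSONPathElement, and friends) are available from Data.Aeson.Types. A standalone sketch (not HGE code) of the new-style import; the parseName example is made up:

{-# LANGUAGE OverloadedStrings #-}

-- Standalone sketch: the JSONPath machinery now comes from Data.Aeson.Types
-- instead of the deprecated Data.Aeson.Internal.
import Data.Aeson (Value, withObject, (.:))
import Data.Aeson.Types (JSONPathElement (..), parseEither, (<?>))

-- Parse a "name" field, tagging any failure with the path element "name".
parseName :: Value -> Either String String
parseName = parseEither $ \v ->
  withObject "user" (\o -> (o .: "name") <?> Key "name") v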

View File

@ -30,7 +30,7 @@ import GHC.Records (HasField (..))
import GHC.TypeLits (KnownSymbol, sameSymbol, symbolVal) import GHC.TypeLits (KnownSymbol, sameSymbol, symbolVal)
import Hasura.Prelude import Hasura.Prelude
import Unsafe.Coerce (unsafeCoerce) import Unsafe.Coerce (unsafeCoerce)
import "some" Data.GADT.Compare import "dependent-sum" Data.GADT.Compare
-- | The 'Select' class provides a way to access subparts of a product type using a reified -- | The 'Select' class provides a way to access subparts of a product type using a reified
-- 'Selector'. A @'Selector' a b@ is essentially a function from @a@ to @b@, and indeed 'select' -- 'Selector'. A @'Selector' a b@ is essentially a function from @a@ to @b@, and indeed 'select'

View File

@ -46,7 +46,7 @@ import Hasura.GraphQL.Parser.Internal.Scalars
import Hasura.GraphQL.Parser.Schema import Hasura.GraphQL.Parser.Schema
import Hasura.GraphQL.Parser.Variable import Hasura.GraphQL.Parser.Variable
import Language.GraphQL.Draft.Syntax qualified as G import Language.GraphQL.Draft.Syntax qualified as G
import Type.Reflection (Typeable, typeRep, (:~:) (Refl)) import Type.Reflection (Typeable, typeRep)
import Witherable (catMaybes) import Witherable (catMaybes)
import Prelude import Prelude

View File

@ -30,7 +30,7 @@ where
import Control.Monad ((>=>)) import Control.Monad ((>=>))
import Data.Aeson qualified as J import Data.Aeson qualified as J
import Data.Aeson.Internal qualified as J.Internal import Data.Aeson.Types qualified as J.Internal
import Data.Int (Int32, Int64) import Data.Int (Int32, Int64)
import Data.Scientific (Scientific) import Data.Scientific (Scientific)
import Data.Scientific qualified as S import Data.Scientific qualified as S

View File

@ -1,6 +1,6 @@
module Hasura.GraphQL.Parser.MonadParseSpec (spec) where module Hasura.GraphQL.Parser.MonadParseSpec (spec) where
import Data.Aeson.Internal import Data.Aeson.Types
import Hasura.Base.ErrorMessage import Hasura.Base.ErrorMessage
import Hasura.GraphQL.Parser.Class import Hasura.GraphQL.Parser.Class
import Hasura.GraphQL.Parser.ErrorCode import Hasura.GraphQL.Parser.ErrorCode

View File

@ -39,6 +39,7 @@ library
, lifted-base , lifted-base
, managed , managed
, morpheus-graphql , morpheus-graphql
, morpheus-graphql-server
, mtl , mtl
, network , network
, odbc , odbc

View File

@ -19,7 +19,7 @@ import Data.IORef
import Data.Kind qualified as K import Data.Kind qualified as K
import Language.Haskell.TH qualified as TH import Language.Haskell.TH qualified as TH
import System.IO.Unsafe (unsafeInterleaveIO) import System.IO.Unsafe (unsafeInterleaveIO)
import Type.Reflection (Typeable, typeRep, (:~:) (..)) import Type.Reflection (Typeable, typeRep)
import Prelude import Prelude
{- Note [Tying the knot] {- Note [Tying the knot]

View File

@ -291,6 +291,7 @@ newtype BackendPollerKey = BackendPollerKey {unBackendPollerKey :: AB.AnyBackend
type PollerMap streamCursor = STMMap.Map BackendPollerKey (Poller streamCursor) type PollerMap streamCursor = STMMap.Map BackendPollerKey (Poller streamCursor)
-- | For dev debugging, output subject to change.
dumpPollerMap :: Bool -> PollerMap streamCursor -> IO J.Value dumpPollerMap :: Bool -> PollerMap streamCursor -> IO J.Value
dumpPollerMap extended pollerMap = dumpPollerMap extended pollerMap =
fmap J.toJSON $ do fmap J.toJSON $ do

View File

@ -90,6 +90,7 @@ initSubscriptionsState pollHook =
<*> pure pollHook <*> pure pollHook
<*> TMap.new <*> TMap.new
-- | For dev debugging, output subject to change.
dumpSubscriptionsState :: Bool -> LiveQueriesOptions -> StreamQueriesOptions -> SubscriptionsState -> IO J.Value dumpSubscriptionsState :: Bool -> LiveQueriesOptions -> StreamQueriesOptions -> SubscriptionsState -> IO J.Value
dumpSubscriptionsState extended liveQOpts streamQOpts (SubscriptionsState lqMap streamMap _ _) = do dumpSubscriptionsState extended liveQOpts streamQOpts (SubscriptionsState lqMap streamMap _ _) = do
lqMapJ <- dumpPollerMap extended lqMap lqMapJ <- dumpPollerMap extended lqMap

View File

@ -29,8 +29,8 @@ where
import Control.Lens hiding (index) import Control.Lens hiding (index)
import Data.Aeson qualified as J import Data.Aeson qualified as J
import Data.Aeson.Internal qualified as J
import Data.Aeson.Key qualified as K import Data.Aeson.Key qualified as K
import Data.Aeson.Types qualified as J
import Data.ByteString.Lazy qualified as BL import Data.ByteString.Lazy qualified as BL
import Data.Has import Data.Has
import Data.HashMap.Strict.Extended qualified as HashMap import Data.HashMap.Strict.Extended qualified as HashMap

View File

@ -47,10 +47,9 @@ where
import Autodocodec (Codec (CommentCodec), HasCodec (codec), JSONCodec, bimapCodec, dimapCodec, named, valueCodec) import Autodocodec (Codec (CommentCodec), HasCodec (codec), JSONCodec, bimapCodec, dimapCodec, named, valueCodec)
import Control.Lens.Plated import Control.Lens.Plated
import Data.Aeson.Extended import Data.Aeson.Extended
import Data.Aeson.Internal
import Data.Aeson.Key qualified as K import Data.Aeson.Key qualified as K
import Data.Aeson.KeyMap qualified as KM import Data.Aeson.KeyMap qualified as KM
import Data.Aeson.Types (parseEither) import Data.Aeson.Types
import Data.HashMap.Strict qualified as HashMap import Data.HashMap.Strict qualified as HashMap
import Data.Monoid import Data.Monoid
import Data.Text.Extended import Data.Text.Extended

View File

@ -43,7 +43,7 @@ import Control.Lens ((^..))
import Crypto.Hash qualified as Crypto import Crypto.Hash qualified as Crypto
import Data.Aeson import Data.Aeson
import Data.Aeson qualified as J import Data.Aeson qualified as J
import Data.Aeson.Internal import Data.Aeson.Types
import Data.ByteArray (convert) import Data.ByteArray (convert)
import Data.ByteString qualified as B import Data.ByteString qualified as B
import Data.ByteString.Base16 qualified as Base16 import Data.ByteString.Base16 qualified as Base16

View File

@ -17,7 +17,7 @@ import Data.Text qualified as T
import Data.Text.Conversions (FromText (..), ToText (..)) import Data.Text.Conversions (FromText (..), ToText (..))
import Hasura.Prelude import Hasura.Prelude
import Language.Haskell.TH import Language.Haskell.TH
import Language.Haskell.TH.Syntax import Language.Haskell.TH.Syntax hiding (makeRelativeToProject) -- TODO can we ditch file-embed?
import Text.Regex.TDFA ((=~~)) import Text.Regex.TDFA ((=~~))
data Version data Version
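
The hiding clause above suggests that Language.Haskell.TH.Syntax, as shipped with this GHC, now exports its own makeRelativeToProject, which clashes with file-embed's. A sketch of what answering the TODO might look like, assuming the template-haskell version has the same FilePath -> Q FilePath shape as file-embed's; "version.txt" is a made-up path:

{-# LANGUAGE TemplateHaskell #-}

-- Hypothetical sketch for the TODO above: embed a file with template-haskell
-- alone, no file-embed. The "version.txt" path is invented for illustration.
import Language.Haskell.TH.Syntax
  (addDependentFile, lift, makeRelativeToProject, runIO)

embeddedVersion :: String
embeddedVersion =
  $( do
       path <- makeRelativeToProject "version.txt"
       addDependentFile path
       runIO (readFile path) >>= lift
   )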

View File

@ -58,7 +58,7 @@ spec =
expectedSQL = expectedSQL =
[QQ.sql| [QQ.sql|
UPDATE "public"."test" UPDATE "public"."test"
SET "name" = ('new name')::text, "description" = ('other')::text SET "description" = ('other')::text, "name" = ('new name')::text
WHERE WHERE
(("public"."test"."id") = (('1')::integer)) (("public"."test"."id") = (('1')::integer))
RETURNING * , ('true')::boolean AS "check__constraint" RETURNING * , ('true')::boolean AS "check__constraint"
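
This reordered SET clause, like the reshuffled inconsistent_objects in the CLI test and golden file earlier in the diff, is consistent with keys living in a hash map whose iteration order shifted with the new hashable/text versions. A standalone sketch of why such orderings are best not baked into expectations (the use of unordered-containers here is an assumption, not taken from HGE code):

-- Standalone sketch: HashMap iteration order is unspecified and can change
-- across hashable/text releases, so tests should sort or canonicalise
-- before comparing rather than encode one particular order.
import qualified Data.HashMap.Strict as HashMap

main :: IO ()
main = do
  let cols :: HashMap.HashMap String String
      cols =
        HashMap.fromList
          [ ("name", "('new name')::text"),
            ("description", "('other')::text")
          ]
  mapM_ print (HashMap.toList cols)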

View File

@ -37,7 +37,6 @@ import Hasura.Table.Cache
( TableCoreInfoG (_tciName), ( TableCoreInfoG (_tciName),
TableInfo (_tiCoreInfo), TableInfo (_tiCoreInfo),
) )
import Language.GraphQL.Draft.Syntax qualified as G
import Language.GraphQL.Draft.Syntax.QQ qualified as G import Language.GraphQL.Draft.Syntax.QQ qualified as G
import Test.Aeson.Expectation (shouldBeSubsetOf) import Test.Aeson.Expectation (shouldBeSubsetOf)
import Test.Hspec import Test.Hspec

View File

@ -11,7 +11,6 @@ import Hasura.Prelude
import Hasura.RQL.IR.BoolExp (OpExpG (..)) import Hasura.RQL.IR.BoolExp (OpExpG (..))
import Hasura.RQL.IR.Returning (MutFldG (..), MutationOutputG (..)) import Hasura.RQL.IR.Returning (MutFldG (..), MutationOutputG (..))
import Hasura.RQL.Types.Instances () import Hasura.RQL.Types.Instances ()
import Language.GraphQL.Draft.Syntax qualified as Syntax
import Test.Backend.Postgres.Misc qualified as P import Test.Backend.Postgres.Misc qualified as P
import Test.Hspec import Test.Hspec
import Test.Parser.Expectation import Test.Parser.Expectation

View File

@ -12,7 +12,7 @@ module Test.Parser.Monad
where where
import Control.Monad.Memoize import Control.Monad.Memoize
import Data.Aeson.Internal (JSONPathElement) import Data.Aeson.Types (JSONPathElement)
import Data.Has (Has (..)) import Data.Has (Has (..))
import Data.Text qualified as T import Data.Text qualified as T
import GHC.Stack import GHC.Stack

View File

@ -4,7 +4,7 @@
X-Hasura-Role: admin X-Hasura-Role: admin
status: 200 status: 200
response: response:
body: "foo=bar&baz=world" body: baz=world&foo=bar
headers: headers:
- - content-type - - content-type
- application/x-www-form-urlencoded - application/x-www-form-urlencoded

View File

@ -173,8 +173,9 @@ class TestRemoteSchemaBasic:
def test_add_schema_conflicts(self, hge_ctx, gql_server): def test_add_schema_conflicts(self, hge_ctx, gql_server):
"""add 2 remote schemas with same node or types""" """add 2 remote schemas with same node or types"""
q = mk_add_remote_q('simple 2', f'{gql_server.url}/hello-graphql') q = mk_add_remote_q('simple 2', f'{gql_server.url}/hello-graphql')
# FYI: resp = ordereddict([('code', 'invalid-configuration'), ('error', "Inconsistent object: Duplicate remote field 'hello', Incons...on', "Inconsistent object: Duplicate remote field 'delayedHello'"), ('type', 'remote_schema')])]), ('path', '$.args')])
resp = hge_ctx.v1q(q, expected_status_code = 400) resp = hge_ctx.v1q(q, expected_status_code = 400)
assert resp['code'] == 'unexpected' assert resp['code'] == 'invalid-configuration', resp
def test_remove_schema_error(self, hge_ctx): def test_remove_schema_error(self, hge_ctx):
"""remove remote schema which is not added""" """remove remote schema which is not added"""