Use arrows instead of monads to define the schema cache construction

Alexis King 2019-11-26 16:49:42 -06:00
parent 1387722970
commit 5b969208c6
51 changed files with 1749 additions and 776 deletions

server/.gitignore
View File

@@ -1,5 +1,6 @@
__pycache__/
dist
dist-newstyle/
cabal-dev
Pipfile
Pipfile.lock
@@ -15,8 +16,10 @@ Pipfile.lock
*.tix
.hsenv
.cabal-sandbox/
.ghc.environment*
cabal.sandbox.config
cabal.config
cabal.project.local
*.prof*
*.aux
*.hp

View File

@@ -27,7 +27,7 @@ flag profile
common common-all
ghc-options:
-O2 -foptimal-applicative-do
-O2 -fmax-simplifier-iterations=20 -foptimal-applicative-do
-Wall -Wcompat -Wincomplete-record-updates -Wincomplete-uni-patterns -Wredundant-constraints
-fdefer-typed-holes
@@ -90,11 +90,12 @@ library
, process
, http-client-tls
, profunctors
, deepseq
-- `these >=1` is split into several different packages, but our current stack
-- resolver has `these <1`; when we upgrade we just need to add an extra
-- dependency on `semialign`
, these <1
, these >=0.7.1 && <0.8
-- Encoder related
, uuid
@@ -193,7 +194,8 @@ library
, QuickCheck
, generic-arbitrary
exposed-modules: Control.Monad.Stateless
exposed-modules: Control.Arrow.Extended
, Control.Monad.Stateless
, Control.Monad.Unique
, Hasura.Prelude
@@ -374,8 +376,8 @@ test-suite graphql-engine-tests
, base
, bytestring
, graphql-engine
, hspec
, hspec-core
, hspec >=2.6.1 && <3
, hspec-core >=2.6.1 && <3
, hspec-expectations-lifted
, http-client
, http-client-tls

View File

@@ -0,0 +1,27 @@
module Control.Arrow.Embed
( ArrowEmbed(..)
) where
import Prelude hiding (id, (.))
import Control.Arrow.Extended
import Control.Category
-- | Allows “embedding” an arrow in another arrow, assuming the target arrow supports the
-- necessary operations. For example, a 'Kleisli' arrow can be embedded in any arrow that implements
-- 'ArrowKleisli'.
class (Arrow arr1, Arrow arr2) => ArrowEmbed arr1 arr2 where
embedA :: arr1 a b -> arr2 a b
instance (ArrowKleisli m arr) => ArrowEmbed (Kleisli m) arr where
embedA (Kleisli f) = arrM f
instance (ArrowChoice arr1, ArrowChoice arr2, ArrowError e arr2, ArrowEmbed arr1 arr2)
=> ArrowEmbed (ErrorA e arr1) arr2 where
embedA (ErrorA f) = embedA f >>> (throwA ||| returnA)
instance (ArrowReader r arr2, ArrowEmbed arr1 arr2) => ArrowEmbed (ReaderA r arr1) arr2 where
embedA (ReaderA f) = (id &&& askA) >>> embedA f
instance (ArrowWriter w arr2, ArrowEmbed arr1 arr2) => ArrowEmbed (WriterA w arr1) arr2 where
embedA (WriterA f) = embedA f >>> second tellA >>> arr fst
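-- As a hedged sketch (names here are illustrative, not part of this module): a plain
-- 'Kleisli' arrow can be embedded into any transformed arrow whose stack ultimately
-- supports 'ArrowKleisli', such as 'ErrorA' over 'Kleisli':
--
-- @
-- logLine :: Kleisli IO String ()
-- logLine = Kleisli putStrLn
--
-- logLine' :: ErrorA e (Kleisli IO) String ()
-- logLine' = embedA logLine
-- @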

View File

@@ -0,0 +1,428 @@
{-# OPTIONS_GHC -Wno-inline-rule-shadowing -Wno-orphans #-} -- see Note [Arrow rewrite rules]
{-# LANGUAGE Arrows #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE UndecidableInstances #-}
{-# LANGUAGE ViewPatterns #-}
-- | The missing standard library for arrows. Some of the functionality in this module is similar to
-- Paterson's original @arrows@ library, but it has been modernized to work with recent versions of
-- GHC.
module Control.Arrow.Extended
( module Control.Arrow
, (>->)
, (<-<)
, foldlA'
, traverseA
, onNothingA
, ArrowTrans(..)
, ArrowKleisli(..)
, bindA
, ArrowError(..)
, mapErrorA
, ErrorA(..)
, ArrowReader(..)
, ReaderA(..)
, ArrowWriter(..)
, WriterA(WriterA, runWriterA)
) where
import Prelude hiding (id, (.))
import Control.Arrow
import Control.Category
import Control.Monad
import Control.Monad.Error.Class
import Control.Monad.Reader.Class
import Control.Monad.Writer.Class
import Data.Foldable
import Data.Tuple (swap)
infixl 1 >->
infixr 1 <-<
-- | The analog to '>>=' for arrow commands. In @proc@ notation, '>->' can be used to chain the
-- output of one command into the input of another.
--
-- See also Note [Weird control operator types].
(>->) :: (Arrow arr) => arr (e, s) a -> arr (e, (a, s)) b -> arr (e, s) b
f >-> g = proc (e, s) -> do
x <- f -< (e, s)
g -< (e, (x, s))
{-# INLINE (>->) #-}
(<-<) :: (Arrow arr) => arr (e, (a, s)) b -> arr (e, s) a -> arr (e, s) b
(<-<) = flip (>->)
{-# INLINE (<-<) #-}
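-- For intuition, a minimal sketch (illustrative) of '>->' instantiated at the ordinary function
-- arrow @(->)@, with the environment @e@ and argument stack @s@ as explicit tuples:
--
-- @
-- demo :: (Int, ()) -> Int
-- demo = f >-> g
--   where
--     f (e, ())      = e * 2  -- sees the environment and the empty stack
--     g (e, (x, ())) = e + x  -- additionally receives f's output on the stack
-- -- demo (10, ()) == 30
-- @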
-- | 'foldl'' lifted to arrows. See also Note [Weird control operator types].
foldlA' :: (ArrowChoice arr, Foldable t) => arr (e, (b, (a, s))) b -> arr (e, (b, (t a, s))) b
foldlA' f = arr (\(e, (v, (xs, s))) -> (e, (v, (toList xs, s)))) >>> go where
go = uncons >>> (id ||| step)
uncons = arr \(e, (v, (xs, s))) -> case xs of
[] -> Left v
x:xs' -> Right ((e, (v, (x, s))), (e, (xs', s)))
step = first f >>> arr (\(!v, (e, (xs, s))) -> (e, (v, (xs, s)))) >>> go
-- | An indexed version of Twan van Laarhoven's @FunList@ type (see
-- <https://twanvl.nl/blog/haskell/non-regular1>). A value of type @'Traversal' a b (t b)@ is a
-- concrete representation of a traversal applied to a data structure of type @t a@ and producing a
-- value of type @t b@. This explicit representation is used to implement 'traverseA' using only
-- 'ArrowChoice'.
data Traversal a r b
= Done b
| Yield a !(r -> Traversal a r b)
instance Functor (Traversal a r) where
fmap f = \case
Done x -> Done (f x)
Yield v k -> Yield v (fmap f . k)
instance Applicative (Traversal a r) where
pure = Done
tf <*> tx = case tf of
Done f -> fmap f tx
Yield v k -> Yield v ((<*> tx) . k)
traversal :: (Traversable t) => t a -> Traversal a b (t b)
traversal = traverse (flip Yield Done)
-- | 'traverse' lifted to arrows. See also Note [Weird control operator types].
traverseA :: (ArrowChoice arr, Traversable t) => arr (e, (a, s)) b -> arr (e, (t a, s)) (t b)
traverseA f = second (first $ arr traversal) >>> go where
go = proc (e, (as, s)) -> case as of
Done bs -> returnA -< bs
Yield a k -> do
b <- f -< (e, (a, s))
go -< (e, (k b, s))
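-- A small illustrative sketch at @(->)@: each element of the traversable flows through the inner
-- arrow in turn, with the environment available to every step.
--
-- >>> traverseA (arr (\((), (x, ())) -> x * 10)) ((), ([1, 2, 3] :: [Int], ()))
-- [10,20,30]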
onNothingA :: (ArrowChoice arr) => arr (e, s) a -> arr (e, (Maybe a, s)) a
onNothingA f = proc (e, (v, s)) -> case v of
Just a -> returnA -< a
Nothing -> f -< (e, s)
{-# RULES -- These rules are missing from Control.Arrow; see Note [Arrow rewrite rules]
"arr/arr/R" forall f g h. arr f . (arr g . h) = arr (f . g) . h
"first/push" [~1] forall f g. first (f . g) = first f . first g
"second/push" [~1] forall f g. second (f . g) = second f . second g
"left/push" [~1] forall f g. left (f . g) = left f . left g
"right/push" [~1] forall f g. right (f . g) = right f . right g
"first/pull" [1] forall f g. first f . first g = first (f . g)
"second/pull" [1] forall f g. second f . second g = second (f . g)
"left/pull" [1] forall f g. left f . left g = left (f . g)
"right/pull" [1] forall f g. right f . right g = right (f . g)
#-}
-- | The class of /Kleisli arrows/, arrows made from monadic functions. Instances should satisfy
-- the following laws:
--
-- * @'arrM' ('pure' '.' /f/)@ ≡ @'arr' /f/@
-- * @('arrM' /f/ '>>>' 'arrM' /g/)@ ≡ @'arrM' (/f/ '>=>' /g/)@.
class (Monad m, Arrow arr) => ArrowKleisli m arr | arr -> m where
arrM :: (a -> m b) -> arr a b
{-# RULES -- see Note [Arrow rewrite rules]
"arr/arrM" forall f g. arr f . arrM g = arrM (fmap f . g)
"arrM/arr" forall f g. arrM f . arr g = arrM (f . g)
"arrM/arrM" forall f g. arrM f . arrM g = arrM (f <=< g)
"arr/arrM/R" forall f g h. arr f . (arrM g . h) = arrM (fmap f . g) . h
"arrM/arr/R" forall f g h. arrM f . (arr g . h) = arrM (f . g) . h
"arrM/arrM/R" forall f g h. arrM f . (arrM g . h) = arrM (f <=< g) . h
"first/arrM" forall f. first (arrM f) = arrM (runKleisli (first (Kleisli f)))
"second/arrM" forall f. second (arrM f) = arrM (runKleisli (second (Kleisli f)))
"left/arrM" forall f. left (arrM f) = arrM (runKleisli (left (Kleisli f)))
"right/arrM" forall f. right (arrM f) = arrM (runKleisli (right (Kleisli f)))
"***/arrM" forall f g. arrM f *** arrM g = arrM (runKleisli (Kleisli f *** Kleisli g))
"&&&/arrM" forall f g. arrM f &&& arrM g = arrM (runKleisli (Kleisli f &&& Kleisli g))
"+++/arrM" forall f g. arrM f +++ arrM g = arrM (runKleisli (Kleisli f +++ Kleisli g))
"|||/arrM" forall f g. arrM f ||| arrM g = arrM (runKleisli (Kleisli f ||| Kleisli g))
#-}
-- | A combinator that serves a similar role to 'returnA' in arrow notation, except that the
-- argument is a monadic action instead of a pure value. Just as 'returnA' is actually just
-- @'arr' 'id'@, 'bindA' is just @'arrM' 'id'@, but it is provided as a separate function for
-- clarity.
--
-- 'bindA' is useful primarily because it allows executing a monadic action using arrow inputs
-- currently in scope. For example:
--
-- @
-- proc (a, b) -> do
-- x <- foo -< a
-- y <- bar -< b
-- 'bindA' -< f x y
-- @
--
-- The last statement is equivalent to @'arrM' ('uncurry' f) -< (x, y)@, but the use of 'bindA'
-- allows it to be expressed more directly.
bindA :: (ArrowKleisli m arr) => arr (m a) a
bindA = arrM id
{-# INLINE bindA #-}
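-- As a concrete sketch at 'Kleisli' 'IO' ('greet' is illustrative):
--
-- @
-- greet :: Kleisli IO (String, String) ()
-- greet = proc (name, greeting) ->
--   bindA -< putStrLn (greeting ++ ", " ++ name)
-- @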
instance (Monad m) => ArrowKleisli m (Kleisli m) where
arrM = Kleisli
class (Arrow arr, Arrow (t arr)) => ArrowTrans t arr where
liftA :: arr a b -> t arr a b
class (Arrow arr) => ArrowError e arr | arr -> e where
throwA :: arr e a
-- see Note [Weird control operator types]
catchA :: arr (a, s) b -> arr (a, (e, s)) b -> arr (a, s) b
mapErrorA :: (ArrowError e arr) => arr (a, s) b -> arr (a, ((e -> e), s)) b
mapErrorA f = proc (a, (g, s)) -> (f -< (a, s)) `catchA` \e -> throwA -< g e
class (Arrow arr) => ArrowReader r arr | arr -> r where
askA :: arr a r
-- see Note [Weird control operator types]
localA :: arr (a, s) b -> arr (a, (r, s)) b
class (Monoid w, Arrow arr) => ArrowWriter w arr | arr -> w where
tellA :: arr w ()
listenA :: arr a b -> arr a (b, w)
instance (MonadError e m) => ArrowError e (Kleisli m) where
throwA = Kleisli throwError
catchA (Kleisli f) (Kleisli g) = Kleisli \(a, s) -> f (a, s) `catchError` \e -> g (a, (e, s))
instance (MonadReader r m) => ArrowReader r (Kleisli m) where
askA = Kleisli $ const ask
localA (Kleisli f) = Kleisli \(a, (r, s)) -> local (const r) (f (a, s))
instance (MonadWriter w m) => ArrowWriter w (Kleisli m) where
tellA = Kleisli tell
listenA (Kleisli f) = Kleisli (listen . f)
newtype ErrorA e arr a b = ErrorA { runErrorA :: arr a (Either e b) }
deriving (Functor)
instance (ArrowChoice arr) => Category (ErrorA e arr) where
id = ErrorA (arr Right)
ErrorA f . ErrorA g = ErrorA ((arr Left ||| f) . g)
instance (ArrowChoice arr) => Arrow (ErrorA e arr) where
arr f = ErrorA (arr (Right . f))
first (ErrorA f) = ErrorA (arr (fmap swap . sequence . swap) . first f)
instance (ArrowChoice arr) => ArrowChoice (ErrorA e arr) where
left (ErrorA f) = ErrorA (arr (either (either Left (Right . Left)) (Right . Right)) . left f)
instance (ArrowChoice arr, ArrowApply arr) => ArrowApply (ErrorA e arr) where
app = ErrorA (app . first (arr runErrorA))
instance (ArrowChoice arr) => ArrowTrans (ErrorA e) arr where
liftA f = ErrorA (arr Right . f)
instance (ArrowChoice arr) => ArrowError e (ErrorA e arr) where
throwA = ErrorA (arr Left)
catchA (ErrorA f) (ErrorA g) = ErrorA proc (a, s) -> do
r <- f -< (a, s)
case r of
Left e -> g -< (a, (e, s))
Right v -> returnA -< Right v
instance (ArrowKleisli m arr, ArrowChoice arr) => ArrowKleisli m (ErrorA e arr) where
arrM = liftA . arrM
instance (ArrowReader r arr, ArrowChoice arr) => ArrowReader r (ErrorA e arr) where
askA = liftA askA
localA (ErrorA f) = ErrorA (localA f)
instance (ArrowWriter w arr, ArrowChoice arr) => ArrowWriter w (ErrorA e arr) where
tellA = liftA tellA
listenA (ErrorA f) = ErrorA (arr (\(r, w) -> (, w) <$> r) . listenA f)
newtype ReaderA r arr a b = ReaderA { runReaderA :: arr (a, r) b }
instance (Arrow arr) => Category (ReaderA r arr) where
id = ReaderA (arr fst)
ReaderA f . ReaderA g = ReaderA proc (a, r) -> do
b <- g -< (a, r)
f -< (b, r)
instance (Arrow arr) => Arrow (ReaderA r arr) where
arr f = ReaderA (arr (f . fst))
first (ReaderA f) = ReaderA proc ((a, c), r) -> do
b <- f -< (a, r)
returnA -< (b, c)
instance (ArrowChoice arr) => ArrowChoice (ReaderA r arr) where
left (ReaderA f) = ReaderA proc (e, r) -> case e of
Left a -> arr Left . f -< (a, r)
Right b -> returnA -< Right b
instance (ArrowApply arr) => ArrowApply (ReaderA r arr) where
app = ReaderA (app . arr \((ReaderA f, x), r) -> (f, (x, r)))
instance (Arrow arr) => ArrowTrans (ReaderA r) arr where
liftA f = ReaderA (f . arr fst)
instance (Arrow arr) => ArrowReader r (ReaderA r arr) where
askA = ReaderA (arr snd)
localA (ReaderA f) = ReaderA proc ((a, (r, s)), _) -> f -< ((a, s), r)
instance (ArrowKleisli m arr) => ArrowKleisli m (ReaderA r arr) where
arrM = liftA . arrM
instance (ArrowError e arr) => ArrowError e (ReaderA r arr) where
throwA = liftA throwA
catchA (ReaderA f) (ReaderA g) = ReaderA proc ((a, s), r) ->
(f -< ((a, s), r)) `catchA` \e -> g -< ((a, (e, s)), r)
instance (ArrowWriter w arr) => ArrowWriter w (ReaderA r arr) where
tellA = liftA tellA
listenA (ReaderA f) = ReaderA (listenA f)
newtype WriterA w arr a b
-- Internally defined using state passing to avoid space leaks. The real constructor should be
-- left unexported to avoid misuse.
= MkWriterA (arr (a, w) (b, w))
pattern WriterA :: (Monoid w, Arrow arr) => arr a (b, w) -> WriterA w arr a b
pattern WriterA { runWriterA } <- MkWriterA ((\f -> f . arr (, mempty)) -> runWriterA)
where
WriterA f = MkWriterA (arr (\((b, w), w1) -> let !w2 = w1 <> w in (b, w2)) . first f)
{-# COMPLETE WriterA #-}
instance (Category arr) => Category (WriterA w arr) where
id = MkWriterA id
MkWriterA f . MkWriterA g = MkWriterA (f . g)
instance (Arrow arr) => Arrow (WriterA w arr) where
arr f = MkWriterA (arr $ first f)
first (MkWriterA f) = MkWriterA proc ((a1, b), w1) -> do
(a2, w2) <- f -< (a1, w1)
returnA -< ((a2, b), w2)
instance (ArrowChoice arr) => ArrowChoice (WriterA w arr) where
left (MkWriterA f) = MkWriterA proc (e, w) -> case e of
Left a -> arr (first Left) . f -< (a, w)
Right b -> returnA -< (Right b, w)
instance (ArrowApply arr) => ArrowApply (WriterA w arr) where
app = MkWriterA (app . arr \((MkWriterA f, x), w) -> (f, (x, w)))
instance (Arrow arr) => ArrowTrans (WriterA w) arr where
liftA = MkWriterA . first
instance (Monoid w, Arrow arr) => ArrowWriter w (WriterA w arr) where
tellA = MkWriterA $ arr \(w, w1) -> let !w2 = w1 <> w in ((), w2)
listenA (WriterA f) = WriterA (arr (\(a, w) -> ((a, w), w)) . f)
instance (ArrowKleisli m arr) => ArrowKleisli m (WriterA w arr) where
arrM = liftA . arrM
instance (ArrowError e arr) => ArrowError e (WriterA w arr) where
throwA = liftA throwA
catchA (MkWriterA f) (MkWriterA g) = MkWriterA proc ((a, s), w) ->
(f -< ((a, s), w)) `catchA` \e -> g -< ((a, (e, s)), w)
instance (ArrowReader r arr) => ArrowReader r (WriterA w arr) where
askA = liftA askA
localA (MkWriterA f) = MkWriterA proc ((a, (r, s)), w) -> (| localA (f -< ((a, s), w)) |) r
{- Note [Weird control operator types]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Arrow notation (i.e. `proc`) has support for so-called custom control operators, which allow
things like
proc (x, y) -> do
z <- foo -< x
(f -< z) `catchA` \e -> g -< (y, e)
to magically work. What's so magical about that? Well, note that `catchA` is an ordinary function,
but it's being given /commands/ as arguments, not expressions. Also note that the arguments to
`catchA` reference the variables `y` and `z`, which are bound earlier in the `proc` expression as
arrow-local variables.
To make this work, GHC has to thread `y` and `z` through `catchA` in the generated code, which will
end up being something like this:
arr (\(x, y) -> (x, (x, y)))
>>> first foo
>>> arr (\(z, (x, y)) -> (z, y))
>>> catchA (first f)
(arr (\((_, y), e) -> (y, e)) >>> g)
Quite complicated, which is why we're glad we don't have to write it all out ourselves!
Unfortunately, since GHC 7.8, GHC has required some pretty stupid-looking types for control
operators to allow them to be used in `proc` notation. The natural type for `catchA` is
catchA :: arr a b -> arr (a, e) b -> arr a b
but GHC requires the far uglier
catchA :: arr (a, s) b -> arr (a, (e, s)) b -> arr (a, s) b
in order to make the type inference work out. I (Alexis) have submitted a GHC proposal to fix this
<https://github.com/ghc-proposals/ghc-proposals/pull/303>, so hopefully we'll be able to use the
nicer type in the future (GHC 8.12 at the earliest). For now, though, we'll have to use the ugly
version.
As of GHC 8.10, the way to read arrow control operator types is to look for arguments with a shape
like this:
arr (e, (a1, (a2, ... (an, s)))) b
The actual arguments to the arrow are the `a1` through `an` types, and the `e` and `s` types are
sort of bookends. So if you see a type like
arr (e, (Integer, (Char, (Bool, s)))) String
then you should read it as an arrow that takes three arguments of type `Integer`, `Char`, and
`Bool` and returns a `String`.
Stopping there is basically good enough, but if you want to know what's really going on, the idea is
that each command in a `proc` block has an environment and an argument stack, represented by the
types `e` and `s`, respectively. The environment is used to thread arrow-local variables that are
currently in scope, and the argument stack (as the name implies) is used to pass the command
arguments. Control operators can push and pop things from this argument stack, and in the base case,
the empty argument stack is represented by `()`. For a full explanation, see the section of the GHC
User's Guide on arrow notation:
https://downloads.haskell.org/ghc/8.8.1/docs/html/users_guide/glasgow_exts.html#arrow-notation
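As a worked sketch (assuming the 'ArrowError' instance for `Kleisli (Either String)` defined
earlier in this module), here is a control operator with the stack-threaded type in action:

  safeDiv :: Kleisli (Either String) (Int, Int) Int
  safeDiv = proc (x, y) ->
    if y == 0
      then throwA -< "division by zero"
      else returnA -< x `div` y

  recover :: Kleisli (Either String) (Int, Int) Int
  recover = proc (x, y) ->
    (safeDiv -< (x, y)) `catchA` \_e -> returnA -< 0

The `catchA` here has the ugly `arr (a, s) b -> arr (a, (e, s)) b -> arr (a, s) b` type, which is
exactly what lets GHC thread `x` and `y` into the handler command.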
Yes, this all kind of sucks. Sorry.
Note [Arrow rewrite rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~
GHC's desugaring of `proc` notation is not very clever, so it can generate some pretty inefficient
code. Almost everything is translated into uses of `arr`, `first`, and `(|||)`, and arrows end up
threading around massive tuples that constantly need to be packed and unpacked. To get good
performance, GHC relies on rewrite rules that expose optimizations to the simplifier, allowing the
packing and unpacking to be significantly reduced.
The most crucial rewrite rules are the ones for `arr` fusion, which rewrite expressions like
`arr f . arr g` into `arr (f . g)`. It might not be obvious at first why this is so important, but
remember that the arguments to `arr` are plain functions, not arrows. These functions might be
something like:
f (a, (b, c)) = ((a, b), c)
g ((a, _), c) = (a, c)
The composition of these functions `f . g` can be optimized to
f . g = \(a, (_, c)) -> (a, c)
skipping the intermediate tuple completely, but GHC can only do that if the two functions are
composed directly. If GHC only sees `arr f . arr g`, then it can't assume anything about `arr`
(which might be overloaded), so it gets stuck.
The rewrite rules defined in Control.Category, Control.Arrow, and this module take advantage of
certain typeclass laws to enable many more optimizations to fire. However, there is a caveat to all
this: when GHC knows the concrete type of a particular arrow, it aggressively specializes uses of
`arr` and other operations to the concrete type. This process bypasses the rewrite rules completely.
GHC tries to warn us about this with the `-Winline-rule-shadowing` warning, but in this case, we
want the rules anyway, since they might fire on polymorphic code. However, the takeaway is that the
generic rules are not sufficient to get fast code: it's important to /also/ define type-specific
rules in the event that GHC specializes concrete code. The good news is that those type-specific
rules can take advantage of type-specific optimizations, getting even better performance than would
be possible using the generic rules. The bad news is it's a bit more work. -}

View File

@@ -4,9 +4,9 @@ module Control.Monad.Unique
, MonadUnique(..)
) where
import Hasura.Prelude
import Hasura.Prelude
import qualified Data.Unique as U
import qualified Data.Unique as U
class (Monad m) => MonadUnique m where
newUnique :: m U.Unique

View File

@@ -2,20 +2,23 @@ module Data.HashMap.Strict.Extended
( module M
, catMaybes
, fromListOn
, unionsAll
, groupOn
, groupOnNE
, differenceOn
, lpadZip
) where
import Prelude
import Prelude
import qualified Data.Align as A
import qualified Data.Foldable as F
import qualified Data.Align as A
import qualified Data.Foldable as F
import Data.Function
import Data.Hashable
import Data.HashMap.Strict as M
import Data.These
import Data.Function
import Data.Hashable
import Data.HashMap.Strict as M
import Data.List.NonEmpty (NonEmpty (..))
import Data.These
catMaybes :: HashMap k (Maybe v) -> HashMap k v
catMaybes = M.mapMaybe id
@@ -23,6 +26,10 @@ catMaybes = M.mapMaybe id
fromListOn :: (Eq k, Hashable k) => (v -> k) -> [v] -> HashMap k v
fromListOn f = fromList . Prelude.map (\v -> (f v, v))
-- | Like 'M.unions', but keeping all elements in the result.
unionsAll :: (Eq k, Hashable k, Foldable t) => t (HashMap k v) -> HashMap k (NonEmpty v)
unionsAll = F.foldl' (\a b -> unionWith (<>) a (fmap (:|[]) b)) M.empty
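-- A possible usage (illustrative, in the doctest style used for 'groupOn' below):
--
-- >>> unionsAll [fromList [(1, "a")], fromList [(1, "b"), (2, "c")]]
-- fromList [(1, "a" :| ["b"]), (2, "c" :| [])]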
-- | Given a 'Foldable' sequence of values and a function that extracts a key from each value,
-- returns a 'HashMap' that maps each key to a list of all values in the sequence for which the
-- given function produced it.
@@ -30,7 +37,10 @@ fromListOn f = fromList . Prelude.map (\v -> (f v, v))
-- >>> groupOn (take 1) ["foo", "bar", "baz"]
-- fromList [("f", ["foo"]), ("b", ["bar", "baz"])]
groupOn :: (Eq k, Hashable k, Foldable t) => (v -> k) -> t v -> HashMap k [v]
groupOn f = Prelude.foldr (\v -> M.alter (Just . maybe [v] (v:)) (f v)) M.empty
groupOn f = fmap F.toList . groupOnNE f
groupOnNE :: (Eq k, Hashable k, Foldable t) => (v -> k) -> t v -> HashMap k (NonEmpty v)
groupOnNE f = Prelude.foldr (\v -> M.alter (Just . (v:|) . maybe [] F.toList) (f v)) M.empty
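-- An illustrative example, mirroring the 'groupOn' one above:
--
-- >>> groupOnNE (take 1) ["foo", "bar", "baz"]
-- fromList [("f", "foo" :| []), ("b", "bar" :| ["baz"])]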
differenceOn :: (Eq k, Hashable k, Foldable t) => (v -> k) -> t v -> t v -> HashMap k v
differenceOn f = M.difference `on` (fromListOn f . F.toList)

View File

@@ -20,9 +20,9 @@ module Hasura.Db
) where
import Control.Lens
import Control.Monad.Trans.Control (MonadBaseControl (..))
import Control.Monad.Unique
import Control.Monad.Validate
import Control.Monad.Trans.Control (MonadBaseControl(..))
import qualified Data.Aeson.Extended as J
import qualified Database.PG.Query as Q

View File

@@ -15,6 +15,7 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Hasura.GraphQL.Resolve.Types
import Hasura.GraphQL.Validate.Types
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Permission
import Hasura.Server.Utils (duplicates)
@@ -88,6 +89,7 @@ data TableCustomRootFields
, _tcrfUpdate :: !(Maybe G.Name)
, _tcrfDelete :: !(Maybe G.Name)
} deriving (Show, Eq, Lift, Generic)
instance NFData TableCustomRootFields
$(deriveToJSON (aesonDrop 5 snakeCase){omitNothingFields=True} ''TableCustomRootFields)
instance FromJSON TableCustomRootFields where

View File

@@ -1,30 +1,29 @@
{-# LANGUAGE Arrows #-}
-- | A simple implementation of /incremental build rules/, which can be used to avoid unnecessary
-- recomputation on incrementally-changing input. See 'Rule' for more details.
module Hasura.Incremental
( Rule
, Result
, rule
, build
, rebuild
, rebuildRule
, result
, mapRule
, mapRuleS
, cache
, cacheWithWriter
, keyed
, ArrowCache(..)
, ArrowDistribute(..)
) where
import Hasura.Prelude hiding (id, (.))
import Hasura.Prelude hiding (id, (.))
import qualified Data.HashMap.Strict as M
import qualified Data.HashMap.Strict as M
import Control.Applicative
import Control.Arrow
import Control.Arrow.Extended
import Control.Category
import Data.Profunctor
import Data.Tuple (swap)
-- | A value of type @'Rule' m a b@ is a /build rule/: a computation that describes how to build a
-- value of type @b@ from a value of type @a@ in a monad @m@. What distinguishes @'Rule' m a b@ from
@@ -40,7 +39,7 @@ import Data.Profunctor
-- incrementalization is not supported inside those rules — they are treated as a single, monolithic
-- computation.
--
-- Atomic rules are created with the 'rule' function, and caching can be added to a rule using the
-- Atomic rules are created with the 'arrM' function, and caching can be added to a rule using the
-- 'cache' combinator. Rules can be executed using the 'build' function, which returns a 'Result'. A
-- 'Result' contains the built value, accessible via 'result', but it also allows supplying a new
-- input value using 'rebuild' to produce a new result incrementally.
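-- As a minimal sketch of that cycle (assuming 'Identity' as the base monad; 'double' is
-- illustrative):
--
-- @
-- double :: Rule Identity Int Int
-- double = arr (* 2)
--
-- demo :: (Int, Int)
-- demo = runIdentity $ do
--   r1 <- build double 5           -- build from scratch
--   r2 <- rebuild r1 21            -- rebuild incrementally from the previous run
--   pure (result r1, result r2)    -- (10, 42)
-- @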
@@ -48,25 +47,11 @@ newtype Rule m a b
= Rule { build :: a -> m (Result m a b) }
deriving (Functor)
-- | Creates a 'Rule' that produces an @b@ from an @a@ using the given monadic function. No caching
-- is applied by default, so the rule will be re-executed on every subsequent rebuild unless it is
-- explicitly wrapped in 'cache'.
rule :: (Functor m) => (a -> m b) -> Rule m a b
rule f = Rule $ \input -> f input <&> \result ->
Result { rebuild = build (rule f), result }
-- | Modifies a 'Rule' by applying a natural transformation.
mapRule :: (Functor n) => (forall r. m r -> n r) -> Rule m a b -> Rule n a b
mapRule f rule' = Rule \input -> f (build rule' input) <&> \result' ->
mapRule f rule = Rule \input -> f (build rule input) <&> \result' ->
result' { rebuild = build (mapRule f (Rule $ rebuild result')) }
-- | Like 'mapRule', but the transformation can produce an extra piece of state in the result. This
-- is most useful for running monad transformers like 'runWriterT' that accumulate extra information
-- during execution (but note the caveats about caching noted in the documentation for 'cache').
mapRuleS :: (Functor n) => (forall r. m r -> n (r, s)) -> Rule m a b -> Rule n a (b, s)
mapRuleS f rule' = Rule \input -> f (build rule' input) <&> \(Result { rebuild, result }, s) ->
Result { rebuild = build (mapRuleS f (Rule rebuild)), result = (result, s) }
instance (Applicative m) => Applicative (Rule m a) where
pure a = Rule . const . pure $ pure a
rule1 <*> rule2 = Rule $ \input -> liftA2 (<*>) (build rule1 input) (build rule2 input)
@@ -74,52 +59,115 @@ instance (Applicative m) => Applicative (Rule m a) where
instance (Functor m) => Profunctor (Rule m) where
dimap f g (Rule build) = Rule (fmap (dimap f g) . build . f)
ruleArrM :: (Functor m) => (a -> m b) -> Rule m a b
ruleArrM f = Rule $ fix \build -> fmap (Result build) . f
{-# INLINABLE[0] ruleArrM #-}
ruleCompose :: (Monad m) => Rule m b c -> Rule m a b -> Rule m a c
ruleCompose rule2 rule1 = Rule $ \input -> do
result1 <- build rule1 input
result2 <- build rule2 (result result1)
pure $ Result
{ rebuild = build (Rule (rebuild result2) `ruleCompose` Rule (rebuild result1))
, result = result result2
}
{-# INLINABLE[0] ruleCompose #-}
ruleFirst :: (Functor m) => Rule m a b -> Rule m (a, c) (b, c)
ruleFirst (Rule build) = Rule $ \(a, b) -> resultFirst b <$> build a
where
resultFirst b Result { rebuild, result } = Result
{ rebuild = \(a, b') -> resultFirst b' <$> rebuild a
, result = (result, b)
}
{-# INLINABLE[0] ruleFirst #-}
-- This is significantly trickier to implement than 'first'! Here's how to think about it: the first
-- time the rule executes, we know nothing about previous runs, so if we're given 'Left', we have to
-- call the original rule we're given. At that point, as long as we are still given 'Left' on every
-- rebuild, we can take advantage of whatever caching happened on the previous run, so we keep
-- recursively calling 'leftResult'.
--
-- However, as soon as we get 'Right', we have to bail out. We return the input we're given, and we
-- forget about any previous executions of the rule completely. If we're given 'Left' on a
-- subsequent rebuild, we start over from the original rule again.
ruleLeft :: (Applicative m) => Rule m a b -> Rule m (Either a c) (Either b c)
ruleLeft (Rule build) = Rule eitherResult
where
eitherResult = either (fmap leftResult . build) rightResult
leftResult Result { rebuild, result } = Result
{ rebuild = either (fmap leftResult . rebuild) rightResult
, result = Left result
}
rightResult input = pure Result
{ rebuild = eitherResult
, result = Right input
}
{-# INLINABLE[0] ruleLeft #-}
firstM :: (Functor m) => (a -> m b) -> (a, c) -> m (b, c)
firstM f (a, b) = (, b) <$> f a
{-# INLINABLE firstM #-}
leftM :: (Applicative m) => (a -> m b) -> Either a c -> m (Either b c)
leftM f = \case
Left a -> Left <$> f a
Right b -> pure $ Right b
{-# INLINABLE leftM #-}
{-# RULES -- see Note [Rule rewrite rules]
"Rule/associate" forall f g h. f `ruleCompose` (g `ruleCompose` h) = (f `ruleCompose` g) `ruleCompose` h
"Rule/arrM/arrM" forall f g. ruleArrM f `ruleCompose` ruleArrM g = ruleArrM (f <=< g)
"Rule/arrM/arrM/R" forall f g h. ruleArrM f `ruleCompose` (ruleArrM g `ruleCompose` h) = ruleArrM (f <=< g) `ruleCompose` h
"Rule/arrM/arrM/L" forall f g h. (f `ruleCompose` ruleArrM g) `ruleCompose` ruleArrM h = f `ruleCompose` ruleArrM (g <=< h)
"Rule/first/arrM" forall f. ruleFirst (ruleArrM f) = ruleArrM (firstM f)
"Rule/left/arrM" forall f. ruleLeft (ruleArrM f) = ruleArrM (leftM f)
"Rule/first/push" [~1] forall f g. ruleFirst (f `ruleCompose` g) = ruleFirst f `ruleCompose` ruleFirst g
"Rule/left/push" [~1] forall f g. ruleLeft (f `ruleCompose` g) = ruleLeft f `ruleCompose` ruleLeft g
"Rule/first/pull" [1] forall f g. ruleFirst f `ruleCompose` ruleFirst g = ruleFirst (f `ruleCompose` g)
"Rule/left/pull" [1] forall f g. ruleLeft f `ruleCompose` ruleLeft g = ruleLeft (f `ruleCompose` g)
#-}
instance (Functor m) => Strong (Rule m) where
first' (Rule build) = Rule $ \(a, b) -> resultFirst b <$> build a
where
resultFirst b Result { rebuild, result } = Result
{ rebuild = \(a, b') -> resultFirst b' <$> rebuild a
, result = (result, b)
}
first' = ruleFirst
{-# INLINE first' #-}
instance (Applicative m) => Choice (Rule m) where
-- This is significantly trickier to implement than 'first'! Here's how to think about it: the
-- first time the rule executes, we know nothing about previous runs, so if we're given 'Left',
-- we have to call the original rule we're given. At that point, as long as we are still given
-- 'Left' on every rebuild, we can take advantage of whatever caching happened on the previous
-- run, so we keep recursively calling 'leftResult'.
--
-- However, as soon as we get 'Right', we have to bail out. We return the input we're given, and
-- we forget about any previous executions of the rule completely. If we're given 'Left' on a
-- subsequent rebuild, we start over from the original rule again.
left' (Rule build) = Rule eitherResult
where
eitherResult = either (fmap leftResult . build) rightResult
leftResult Result { rebuild, result } = Result
{ rebuild = either (fmap leftResult . rebuild) rightResult
, result = Left result
}
rightResult input = pure $ Result
{ rebuild = eitherResult
, result = Right input
}
left' = ruleLeft
{-# INLINE left' #-}
instance (Monad m) => Category (Rule m) where
id = Rule . fix $ \build -> pure . Result build
rule2 . rule1 = Rule $ \input -> do
result1 <- build rule1 input
result2 <- build rule2 (result result1)
pure $ Result
{ rebuild = build (Rule (rebuild result2) . Rule (rebuild result1))
, result = result result2
}
id = arrM pure
{-# INLINE id #-}
(.) = ruleCompose
{-# INLINE (.) #-}
instance (Monad m) => Arrow (Rule m) where
arr f = Rule . fix $ \build -> pure . Result build . f
first = first'
arr f = arrM (pure . f)
{-# INLINE arr #-}
first = ruleFirst
{-# INLINE first #-}
second f = arr swap . first f . arr swap
{-# INLINE second #-}
f *** g = second g . first f
{-# INLINE (***) #-}
f &&& g = (f *** g) . arr (\x -> (x, x))
{-# INLINE (&&&) #-}
instance (Monad m) => ArrowChoice (Rule m) where
left = left'
left = ruleLeft
{-# INLINE left #-}
right f = arr (either Right Left) . ruleLeft f . arr (either Right Left)
{-# INLINE right #-}
f +++ g = right g . left f
{-# INLINE (+++) #-}
f ||| g = arr (either id id) . (f +++ g)
{-# INLINE (|||) #-}
instance (Monad m) => ArrowKleisli m (Rule m) where
arrM = ruleArrM
{-# INLINE arrM #-}
data Result m a b
= Result
@@ -146,81 +194,120 @@ instance (Functor m) => Profunctor (Result m) where
, result = g result
}
-- | Adds equality-based caching to the given rule. After each execution of the rule, its input and
-- result values are cached. On the next rebuild, the input value is compared via '==' to the
-- previous input value. If they are the same, the previous build result is returned /without/
-- re-executing the rule. Otherwise, the old cached values are discarded, and the rule is
-- re-executed to produce a new set of cached values.
--
-- Indiscriminate use of 'cache' is likely to have little effect except to increase memory usage,
-- since the input and result of each rule execution must be retained in memory. Avoid using 'cache'
-- around rules with large input or output that is likely to change often unless profiling
-- indicates it is computationally expensive enough to be worth the memory overhead.
--
-- __Note that only direct inputs and outputs of a 'Rule' are cached.__ It is extremely important to
-- take care in your choice of the base monad @m@:
--
-- * Monads that provide access to extra information through a side-channel, such as 'ReaderT',
-- 'StateT', or 'IO', will __not__ expose that information to dependency analysis. If that
-- information changes between builds, but the rule's direct inputs remain unchanged, the rule
-- will __not__ be re-executed.
--
-- * Dually, monads that perform side-effects as part of execution, such as 'StateT', 'WriterT',
-- or 'IO', will __not__ have their side-effects automatically replayed if the cached result is
-- used. If the side effects are only necessary to change some state to bring it in line with
-- the updated inputs, that is entirely fine (and likely even desirable), but if the
-- side-effects are necessary to produce each result, caching will lead to incorrect behavior.
--
-- The safest monad to use for @m@ is therefore 'Identity', which suffers neither of the above
-- problems by construction. However, in practice, it is highly desirable to be able to execute
-- rules that may perform effects such as raising errors, accumulating information, or modifying
-- external state, so the capability is exposed.
--
-- See also 'cacheWithWriter' for a variant of 'cache' that cooperates with 'MonadWriter' to allow
-- safe use of accumulative state.
cache :: forall a b m. (Eq a, Applicative m) => Rule m a b -> Rule m a b
cache (Rule build) = Rule $ \input -> cacheResult input <$> build input
where
cacheResult :: a -> Result m a b -> Result m a b
cacheResult oldInput Result { rebuild, result } = fix $ \cachedBuild -> Result
{ rebuild = \newInput -> if
| oldInput == newInput -> pure cachedBuild
| otherwise -> cacheResult newInput <$> rebuild newInput
, result
}
class (Arrow arr) => ArrowCache arr where
-- | Adds equality-based caching to the given rule. After each execution of the rule, its input
-- and result values are cached. On the next rebuild, the input value is compared via '==' to the
-- previous input value. If they are the same, the previous build result is returned /without/
-- re-executing the rule. Otherwise, the old cached values are discarded, and the rule is
-- re-executed to produce a new set of cached values.
--
-- Indiscriminate use of 'cache' is likely to have little effect except to increase memory usage,
-- since the input and result of each rule execution must be retained in memory. Avoid using
-- 'cache' around rules with large input or output that is likely to change often unless profiling
-- indicates it is computationally expensive enough to be worth the memory overhead.
--
-- __Note that only direct inputs and outputs of a 'Rule' are cached.__ It is extremely important
-- to take care in your choice of the base monad @m@:
--
-- * Monads that provide access to extra information through a side-channel, such as 'ReaderT',
-- 'StateT', or 'IO', will __not__ expose that information to dependency analysis. If that
-- information changes between builds, but the rule's direct inputs remain unchanged, the rule
-- will __not__ be re-executed.
--
-- * Dually, monads that perform side-effects as part of execution, such as 'StateT', 'WriterT',
-- or 'IO', will __not__ have their side-effects automatically replayed if the cached result
-- is used. If the side effects are only necessary to change some state to bring it in line
-- with the updated inputs, that is entirely fine (and likely even desirable), but if the
-- side-effects are necessary to produce each result, caching will lead to incorrect behavior.
--
-- The safest monad to use for @m@ is therefore 'Identity', which suffers neither of the above
-- problems by construction. However, in practice, it is highly desirable to be able to execute
-- rules that may perform side-effects in 'IO', so the capability is exposed.
--
-- For a safe way to use other effects with 'Rule', use arrow transformers like 'ErrorA',
-- 'ReaderA', and 'WriterA' on top of a base @'Rule' m@ arrow. Such uses are completely safe, as
-- the extra information added by other transformers /will/ be exposed to dependency analysis and
-- /will/ be cached.
cache :: (Eq a) => arr a b -> arr a b
-- | Like 'cache', but safe to use with 'MonadWriter'. Any uses of 'tell' during the rule execution
-- will be captured and cached alongside the resulting value, and they will be effectively replayed
-- whenever the cached value is used.
cacheWithWriter :: forall a b m w. (Eq a, MonadWriter w m) => Rule m a b -> Rule m a b
cacheWithWriter (Rule build) = Rule $ \input -> cacheResult input <$> listen (build input)
where
cacheResult :: a -> (Result m a b, w) -> Result m a b
cacheResult oldInput (Result { rebuild, result }, capturedLog) = fix $ \cachedBuild -> Result
{ rebuild = \newInput -> if
| oldInput == newInput -> tell capturedLog $> cachedBuild
| otherwise -> cacheResult newInput <$> listen (rebuild newInput)
, result
}
instance (ArrowChoice arr, ArrowCache arr) => ArrowCache (ErrorA e arr) where
cache (ErrorA f) = ErrorA (cache f)
instance (Monoid w, ArrowCache arr) => ArrowCache (WriterA w arr) where
cache (WriterA f) = WriterA (cache f)
-- | Given a 'Rule' that operates on key-value pairs, produces a 'Rule' that operates on a
-- 'M.HashMap'. If the input rule is incremental in its argument, the resulting rule will be
-- incremental as well for any entries in the map that do not change between builds.
keyed
:: forall a b k m. (Eq k, Hashable k, Applicative m)
=> Rule m (k, a) b -> Rule m (M.HashMap k a) (M.HashMap k b)
keyed baseRule = buildWith M.empty
where
buildWith :: M.HashMap k (Rule m a b) -> Rule m (M.HashMap k a) (M.HashMap k b)
buildWith !ruleMap = Rule $ \valueMap ->
M.traverseWithKey processEntry valueMap <&> \resultMap -> Result
{ rebuild = build (buildWith (Rule . rebuild <$> resultMap))
, result = result <$> resultMap
instance (Monad m) => ArrowCache (Rule m) where
cache :: forall a b. (Eq a) => Rule m a b -> Rule m a b
cache (Rule build) = Rule \input -> cacheResult input <$> build input
where
cacheResult :: a -> Result m a b -> Result m a b
cacheResult oldInput Result { rebuild, result } = fix \cachedBuild -> Result
{ rebuild = \newInput -> if
| oldInput == newInput -> pure cachedBuild
| otherwise -> cacheResult newInput <$> rebuild newInput
, result
}
where
processEntry :: k -> a -> m (Result m a b)
processEntry k v =
let ruleForKey = case M.lookup k ruleMap of
Just existingRule -> existingRule
Nothing -> lmap (k,) baseRule
in build ruleForKey v
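-- A small sketch of 'cache' at work (assuming the 'Identity' base monad): on a rebuild with an
-- '=='-equal input, the wrapped rule is skipped entirely.
--
-- @
-- cachedSquare :: Rule Identity Int Int
-- cachedSquare = cache (arr (\x -> x * x))
--
-- demo :: Int
-- demo = runIdentity $ do
--   r1 <- build cachedSquare 4
--   r2 <- rebuild r1 4  -- equal input: cached result reused, rule not re-run
--   pure (result r2)    -- 16
-- @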
class (ArrowChoice arr) => ArrowDistribute arr where
-- | Given a 'Rule' that operates on key-value pairs, produces a 'Rule' that operates on a
-- 'M.HashMap'. If the input rule is incremental in its argument, the resulting rule will be
-- incremental as well for any entries in the map that do not change between builds.
--
-- TODO: Laws that capture order-independence.
--
-- This is intended to be used as a control operator in @proc@ notation; see
-- Note [Weird control operator types] in "Control.Arrow.Extended".
keyed
:: (Eq k, Hashable k)
=> arr (e, (k, (a, s))) b
-> arr (e, (M.HashMap k a, s)) (M.HashMap k b)
-- Note that 'ErrorA' does /not/ support an instance of 'ArrowDistribute', as it is impossible to
-- define an instance that short-circuits on the first error! A hypothetical 'ErrorsA' could support
-- such an instance, however, as it could combine all the errors produced by each branch.
instance (Monoid w, ArrowDistribute arr) => ArrowDistribute (WriterA w arr) where
keyed (WriterA f) = WriterA (arr (swap . sequence . fmap swap) <<< keyed f)
instance (Monad m) => ArrowDistribute (Rule m) where
keyed
:: forall a b k e s. (Eq k, Hashable k)
=> Rule m (e, (k, (a, s))) b
-> Rule m (e, (M.HashMap k a, s)) (M.HashMap k b)
keyed entryRule = buildWith M.empty
where
buildWith
:: M.HashMap k (Rule m (e, (a, s)) b)
-> Rule m (e, (M.HashMap k a, s)) (M.HashMap k b)
buildWith !ruleMap = Rule \(e, (valueMap, s)) ->
M.traverseWithKey (processEntry e s) valueMap <&> \resultMap -> Result
{ rebuild = build (buildWith (Rule . rebuild <$> resultMap))
, result = result <$> resultMap
}
where
processEntry :: e -> s -> k -> a -> m (Result m (e, (a, s)) b)
processEntry e s k v = build (ruleForKey k) (e, (v, s))
ruleForKey :: k -> Rule m (e, (a, s)) b
ruleForKey k = case M.lookup k ruleMap of
Just existingRule -> existingRule
Nothing -> lmap (\(e, (v, s)) -> (e, (k, (v, s)))) entryRule
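-- A hedged sketch of 'keyed' used as a @proc@ control operator (mirroring its use in
-- Hasura.RQL.DDL.Schema.Cache); the per-key rules are reused across rebuilds:
--
-- @
-- doubleAll :: (Monad m) => Rule m (M.HashMap Int Int) (M.HashMap Int Int)
-- doubleAll = proc m ->
--   (| keyed (\_key v -> returnA -< v * 2) |) m
-- @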
{- Note [Rule rewrite rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As explained by Note [Arrow rewrite rules] in Control.Arrow.Extended, it's important to define
type-specific rewrite rules to get good performance with arrows. This is especially important for
`Rule`, since the recursive definitions of operations like `.` and `arr` are very difficult for the
optimizer to deal with, and the composition of lots of small rules created with `arr` is very
inefficient.
Fortunately, efficient rules for `Rule` aren't too hard. The idea is to define all the operations in
terms of a small set of primitives: `.`, `arrM`, `first`, and `left`. Then we can introduce rules
for `arrM` fusion, and the arguments to `arrM` are just plain old monadic actions, which GHC is
really good at optimizing already. This doesn't get rid of uses of `.` entirely, but it
significantly reduces them.
Since GHC aggressively specializes and inlines class methods, the rules cannot be defined on the
class methods themselves. Instead, the class methods expand to auxiliary definitions, and those
definitions include an INLINABLE[0] pragma that ensures they do not inline until the final
optimization phase. The rules are defined in terms of those definitions, so they will be able to do
their work in prior phases. -}

View File

@@ -13,50 +13,53 @@ module Hasura.Prelude
, mapFromL
) where
import Control.Applicative as M (Alternative (..))
import Control.Arrow as M (first, second, (&&&), (***), (>>>), (<<<))
import Control.Monad as M (void, when)
import Control.Monad.Base as M
import Control.Monad.Except as M
import Control.Monad.Fail as M (MonadFail)
import Control.Monad.Identity as M
import Control.Monad.Reader as M
import Control.Monad.State.Strict as M
import Control.Monad.Writer.Strict as M
import Data.Bool as M (bool)
import Data.Data as M (Data (..))
import Data.Either as M (lefts, partitionEithers, rights)
import Data.Foldable as M (asum, foldrM, for_, toList, traverse_)
import Data.Function as M (on, (&))
import Data.Functor as M (($>), (<&>))
import Data.Hashable as M (Hashable)
import Data.List as M (find, findIndex, foldl', group, intercalate,
intersect, lookup, sort, sortBy, sortOn, union,
unionBy, (\\))
import Data.Maybe as M (catMaybes, fromMaybe, isJust, isNothing,
listToMaybe, mapMaybe, maybeToList)
import Data.Ord as M (comparing)
import Data.Semigroup as M (Semigroup (..))
import Data.Sequence as M (Seq)
import Data.String as M (IsString)
import Data.Text as M (Text)
import Data.Traversable as M (for)
import Data.Word as M (Word64)
import GHC.Generics as M (Generic)
import Prelude as M hiding (fail, init, lookup)
import Text.Read as M (readEither, readMaybe)
import Control.Applicative as M (Alternative (..))
import Control.Arrow as M (first, second, (&&&), (***), (<<<), (>>>))
import Control.DeepSeq as M (NFData, deepseq, force)
import Control.Monad as M (void, when)
import Control.Monad.Base as M
import Control.Monad.Except as M
import Control.Monad.Fail as M (MonadFail)
import Control.Monad.Identity as M
import Control.Monad.Reader as M
import Control.Monad.State.Strict as M
import Control.Monad.Writer.Strict as M
import Data.Align as M (Align (align, alignWith))
import Data.Align.Key as M (AlignWithKey (..))
import Data.Bool as M (bool)
import Data.Data as M (Data (..))
import Data.Either as M (lefts, partitionEithers, rights)
import Data.Foldable as M (asum, foldrM, for_, toList, traverse_)
import Data.Function as M (on, (&))
import Data.Functor as M (($>), (<&>))
import Data.Hashable as M (Hashable)
import Data.HashMap.Strict as M (HashMap)
import Data.HashSet as M (HashSet)
import Data.List as M (find, findIndex, foldl', group,
intercalate, intersect, lookup, sort,
sortBy, sortOn, union, unionBy, (\\))
import Data.List.NonEmpty as M (NonEmpty(..))
import Data.Maybe as M (catMaybes, fromMaybe, isJust, isNothing,
listToMaybe, mapMaybe, maybeToList)
import Data.Ord as M (comparing)
import Data.Semigroup as M (Semigroup (..))
import Data.Sequence as M (Seq)
import Data.String as M (IsString)
import Data.Text as M (Text)
import Data.These as M (These (..), fromThese, mergeThese,
mergeTheseWith, these)
import Data.Traversable as M (for)
import Data.Word as M (Word64)
import GHC.Generics as M (Generic)
import Prelude as M hiding (fail, init, lookup)
import Test.QuickCheck.Arbitrary.Generic as M
import Data.These as M (These(..), these, fromThese, mergeThese, mergeTheseWith)
import Data.Align as M (Align(align, alignWith))
import Data.Align.Key as M (AlignWithKey(..))
import Data.HashMap.Strict as M (HashMap)
import Data.HashSet as M (HashSet)
import Text.Read as M (readEither, readMaybe)
import qualified Data.ByteString as B
import qualified Data.HashMap.Strict as Map
import qualified Data.ByteString as B
import qualified Data.HashMap.Strict as Map
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import qualified Test.QuickCheck as QC
alphaNumerics :: String

View File

@@ -29,16 +29,17 @@ import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import qualified Control.Monad.Validate as MV
import qualified Data.HashSet as S
import qualified Data.Sequence as Seq
import qualified Database.PG.Query as Q
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Data.HashSet as S
data ComputedFieldDefinition
= ComputedFieldDefinition
{ _cfdFunction :: !QualifiedFunction
, _cfdTableArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Lift, Generic)
instance NFData ComputedFieldDefinition
$(deriveJSON (aesonDrop 4 snakeCase) ''ComputedFieldDefinition)
data AddComputedField
@@ -47,7 +48,8 @@ data AddComputedField
, _afcName :: !ComputedFieldName
, _afcDefinition :: !ComputedFieldDefinition
, _afcComment :: !(Maybe Text)
} deriving (Show, Eq, Lift)
} deriving (Show, Eq, Lift, Generic)
instance NFData AddComputedField
$(deriveJSON (aesonDrop 4 snakeCase) ''AddComputedField)
runAddComputedField :: (MonadTx m, CacheRWM m) => AddComputedField -> m EncJSON

View File

@@ -12,14 +12,14 @@ import qualified Data.Text as T
data HeaderConf = HeaderConf HeaderName HeaderValue
deriving (Show, Eq, Lift, Generic)
instance NFData HeaderConf
instance Hashable HeaderConf
type HeaderName = T.Text
data HeaderValue = HVValue T.Text | HVEnv T.Text
deriving (Show, Eq, Lift, Generic)
instance NFData HeaderValue
instance Hashable HeaderValue
instance FromJSON HeaderConf where

View File

@@ -28,8 +28,7 @@ import Hasura.RQL.DDL.EventTrigger (delEventTriggerFromCatalog,
subTableP2)
import Hasura.RQL.DDL.Metadata.Types
import Hasura.RQL.DDL.Permission.Internal (dropPermFromCatalog)
import Hasura.RQL.DDL.RemoteSchema (addRemoteSchemaP2,
removeRemoteSchemaFromCatalog)
import Hasura.RQL.DDL.RemoteSchema (addRemoteSchemaP2, removeRemoteSchemaFromCatalog)
import Hasura.RQL.Types
import Hasura.SQL.Types
@@ -405,7 +404,7 @@ runDropInconsistentMetadata
=> DropInconsistentMetadata -> m EncJSON
runDropInconsistentMetadata _ = do
sc <- askSchemaCache
let inconsSchObjs = map (_moId . _imoObject) $ scInconsistentObjs sc
let inconsSchObjs = L.nub . concatMap imObjectIds $ scInconsistentObjs sc
-- Note: when building the schema cache, we try to put dependents after their dependencies in the
-- list of inconsistent objects, so reverse the list to start with dependents first. This is not
-- perfect — a completely accurate solution would require performing a topological sort — but it

View File

@@ -105,7 +105,7 @@ dropView vn =
Q.unitQ dropViewS () False
where
dropViewS = Q.fromBuilder $
"DROP VIEW " <> toSQL vn
"DROP VIEW IF EXISTS " <> toSQL vn
procSetObj
:: (QErrM m)
@@ -161,6 +161,7 @@ buildInsInfra tn (InsPermInfo _ vn be _ _) = do
let trigFnQ = buildInsTrigFn vn tn $ toSQLBoolExp (S.QualVar "NEW") resolvedBoolExp
Q.catchE defaultTxErrorHandler $ do
-- Create the view
dropView vn
Q.unitQ (buildView tn vn) () False
-- Inject defaults on the view
Q.discardQ (injectDefaults vn tn) () False

View File

@@ -58,10 +58,10 @@ import Hasura.Server.Utils (matchRegex)
data RunSQL
= RunSQL
{ rSql :: Text
, rCascade :: !Bool
, rCheckMetadataConsistency :: !(Maybe Bool)
, rTxAccessMode :: !Q.TxAccess
{ rSql :: Text
, rCascade :: !Bool
, rCheckMetadataConsistency :: !(Maybe Bool)
, rTxAccessMode :: !Q.TxAccess
} deriving (Show, Eq, Lift)
instance FromJSON RunSQL where
@@ -81,7 +81,7 @@ instance ToJSON RunSQL where
, "check_metadata_consistency" .= rCheckMetadataConsistency
, "read_only" .=
case rTxAccessMode of
Q.ReadOnly -> True
Q.ReadOnly -> True
Q.ReadWrite -> False
]

View File

@@ -22,17 +22,20 @@ import Hasura.Prelude
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.HashSet as HS
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Data.Sequence as Seq
import qualified Database.PG.Query as Q
import Control.Arrow.Extended
import Control.Lens hiding ((.=))
import Control.Monad.Unique
import Data.Aeson
import Control.Arrow
import Control.Lens hiding ((.=))
import Control.Monad.Unique
-- import Data.IORef
import Data.List (nub)
-- import Data.Time.Clock
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Schema as GS
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.Incremental as Inc
import Hasura.Db
@@ -63,7 +66,7 @@ type InvalidationKey = Unique
data BuildInputs
= BuildInputs
{ _biReason :: !BuildReason
{ _biReason :: !BuildReason
, _biCatalogMetadata :: !CatalogMetadata
, _biInvalidationMap :: !InvalidationMap
} deriving (Eq)
@@ -94,6 +97,7 @@ data RebuildableSchemaCache m
}
$(makeLensesFor [("_rscInvalidationMap", "rscInvalidationMap")] ''RebuildableSchemaCache)
{-# INLINABLE buildRebuildableSchemaCache #-} -- see Note [Specialization of buildRebuildableSchemaCache]
buildRebuildableSchemaCache
:: (MonadIO m, MonadTx m, HasHttpManager m, HasSQLGenCtx m)
=> m (RebuildableSchemaCache m)
@@ -120,11 +124,19 @@ instance (Monad m) => CacheRM (CacheRWT m) where
instance (MonadIO m, MonadTx m, MonadUnique m) => CacheRWM (CacheRWT m) where
buildSchemaCacheWithOptions buildReason = CacheRWT do
-- startTime <- liftIO getCurrentTime
RebuildableSchemaCache _ invalidationMap rule <- get
catalogMetadata <- liftTx fetchCatalogData
-- afterFetchTime <- liftIO getCurrentTime
-- liftIO $ putStrLn $ "--> [fetch] " <> show (afterFetchTime `diffUTCTime` startTime)
result <- lift $ flip runReaderT buildReason $ Inc.build rule (catalogMetadata, invalidationMap)
let schemaCache = Inc.result result
prunedInvalidationMap = pruneInvalidationMap schemaCache invalidationMap
let !schemaCache = Inc.result result
-- afterBuildTime <- liftIO getCurrentTime
-- liftIO $ putStrLn $ "--> [build] " <> show (afterBuildTime `diffUTCTime` afterFetchTime)
let !prunedInvalidationMap = pruneInvalidationMap schemaCache invalidationMap
-- afterPruneTime <- liftIO getCurrentTime
-- liftIO $ putStrLn $ "--> [prune] " <> show (afterPruneTime `diffUTCTime` afterBuildTime)
-- liftIO $ putStrLn $ "[TOTAL] " <> show (afterPruneTime `diffUTCTime` startTime)
put $ RebuildableSchemaCache schemaCache prunedInvalidationMap (Inc.rebuildRule result)
where
pruneInvalidationMap schemaCache = M.filterWithKey \name _ ->
@@ -134,13 +146,15 @@ instance (MonadIO m, MonadTx m, MonadUnique m) => CacheRWM (CacheRWT m) where
unique <- newUnique
assign (rscInvalidationMap . at name) (Just unique)
{-# INLINABLE buildSchemaCacheRule #-} -- see Note [Specialization of buildRebuildableSchemaCache]
buildSchemaCacheRule
-- Note: by supplying BuildReason via MonadReader, it does not participate in caching, which is
-- what we want!
:: (MonadIO m, MonadTx m, MonadReader BuildReason m, HasHttpManager m, HasSQLGenCtx m)
=> Inc.Rule m (CatalogMetadata, InvalidationMap) SchemaCache
:: ( Inc.ArrowCache arr, Inc.ArrowDistribute arr, ArrowKleisli m arr
, MonadIO m, MonadTx m, MonadReader BuildReason m, HasHttpManager m, HasSQLGenCtx m )
=> (CatalogMetadata, InvalidationMap) `arr` SchemaCache
buildSchemaCacheRule = proc inputs -> do
(outputs, collectedInfo) <- Inc.mapRuleS runWriterT buildAndCollectInfo -< inputs
(outputs, collectedInfo) <- runWriterA buildAndCollectInfo -< inputs
let (inconsistentObjects, unresolvedDependencies) = partitionCollectedInfo collectedInfo
(resolvedOutputs, extraInconsistentObjects, resolvedDependencies) <-
resolveDependencies -< (outputs, unresolvedDependencies)
@@ -156,65 +170,73 @@ buildSchemaCacheRule = proc inputs -> do
}
where
buildAndCollectInfo
:: ( MonadIO m
, MonadTx m
, MonadReader BuildReason m
, MonadWriter (Seq CollectedInfo) m
, HasHttpManager m
, HasSQLGenCtx m
)
=> Inc.Rule m (CatalogMetadata, InvalidationMap) BuildOutputs
buildAndCollectInfo = Inc.rule \(catalogMetadata, invalidationMap) -> do
buildReason <- ask
when (buildReason == CatalogUpdate) $
liftTx $ Q.catchE defaultTxErrorHandler clearHdbViews
:: ( Inc.ArrowCache arr, Inc.ArrowDistribute arr, ArrowKleisli m arr
, ArrowWriter (Seq CollectedInfo) arr, MonadIO m, MonadTx m, MonadReader BuildReason m
, HasHttpManager m, HasSQLGenCtx m )
=> (CatalogMetadata, InvalidationMap) `arr` BuildOutputs
buildAndCollectInfo = proc (catalogMetadata, invalidationMap) -> do
let CatalogMetadata tables relationships permissions
eventTriggers remoteSchemas functions fkeys' allowlistDefs
computedFields = catalogMetadata
-- tables
tableRawInfos <- buildTableCache tables
tableRawInfos <- buildTableCache -< tables
let tableNames = HS.fromList $ M.keys tableRawInfos
-- relationships and computed fields
let relationshipsByTable = M.groupOn _crTable relationships
computedFieldsByTable = M.groupOn (_afcTable . _cccComputedField) computedFields
fkeys = HS.fromList fkeys'
tableCoreInfos <- tableRawInfos
& alignExtraTableInfo mkRelationshipMetadataObject relationshipsByTable
>>= alignExtraTableInfo mkComputedFieldMetadataObject computedFieldsByTable
>>= traverse \((tableRawInfo, tableRelationships), tableComputedFields) -> do
let name = _tciName tableRawInfo
columns = _tciFieldInfoMap tableRawInfo
allFields <- addNonColumnFields fkeys tableNames name columns tableRelationships tableComputedFields
pure tableRawInfo { _tciFieldInfoMap = allFields }
tableCoreInfos <- (tableRawInfos >- returnA)
>-> (\info -> (info, relationshipsByTable) >- alignExtraTableInfo mkRelationshipMetadataObject)
>-> (\info -> (info, computedFieldsByTable) >- alignExtraTableInfo mkComputedFieldMetadataObject)
>-> (| Inc.keyed (\_ ((tableRawInfo, tableRelationships), tableComputedFields) -> do
let columns = _tciFieldInfoMap tableRawInfo
allFields <- addNonColumnFields -<
(fkeys, tableNames, columns, tableRelationships, tableComputedFields)
returnA -< tableRawInfo { _tciFieldInfoMap = allFields }) |)
-- permissions and event triggers
tableCache <- tableCoreInfos
& alignExtraTableInfo mkPermissionMetadataObject (M.groupOn _cpTable permissions)
>>= alignExtraTableInfo mkEventTriggerMetadataObject (M.groupOn _cetTable eventTriggers)
>>= traverse \((tableCoreInfo, tablePermissions), tableEventTriggers) -> do
permissionInfos <- buildTablePermissions tableCoreInfos tableCoreInfo tablePermissions
eventTriggerInfos <- buildTableEventTriggers tableCoreInfos tableEventTriggers
pure TableInfo
{ _tiCoreInfo = tableCoreInfo
, _tiRolePermInfoMap = permissionInfos
, _tiEventTriggerInfoMap = eventTriggerInfos
}
-- permTimeRef <- bindA -< liftIO $ newIORef 0
-- eventTimeRef <- bindA -< liftIO $ newIORef 0
tableCache <- (tableCoreInfos >- returnA)
>-> (\info -> (info, M.groupOn _cpTable permissions) >- alignExtraTableInfo mkPermissionMetadataObject)
>-> (\info -> (info, M.groupOn _cetTable eventTriggers) >- alignExtraTableInfo mkEventTriggerMetadataObject)
>-> (| Inc.keyed (\_ ((tableCoreInfo, tablePermissions), tableEventTriggers) -> do
-- startTime <- bindA -< liftIO getCurrentTime
permissionInfos <- buildTablePermissions -< (tableCoreInfos, tableCoreInfo, tablePermissions)
-- afterPermTime <- bindA -< liftIO getCurrentTime
eventTriggerInfos <- buildTableEventTriggers -< (tableCoreInfos, tableEventTriggers)
-- afterEventsTime <- bindA -< liftIO getCurrentTime
-- bindA -< liftIO $ modifyIORef' permTimeRef (+ (afterPermTime `diffUTCTime` startTime))
-- bindA -< liftIO $ modifyIORef' eventTimeRef (+ (afterEventsTime `diffUTCTime` afterPermTime))
returnA -< TableInfo
{ _tiCoreInfo = tableCoreInfo
, _tiRolePermInfoMap = permissionInfos
, _tiEventTriggerInfoMap = eventTriggerInfos
}) |)
-- permTime <- bindA -< liftIO $ readIORef permTimeRef
-- eventTime <- bindA -< liftIO $ readIORef eventTimeRef
-- bindA -< liftIO $ putStrLn $ "----> [build/perms] " <> show permTime
-- bindA -< liftIO $ putStrLn $ "----> [build/events] " <> show eventTime
-- sql functions
functionCache <- M.catMaybes <$>
for (mapFromL _cfFunction functions) \(CatalogFunction qf systemDefined config funcDefs) -> do
let definition = toJSON $ TrackFunction qf
metadataObject = MetadataObject (MOFunction qf) definition
schemaObject = SOFunction qf
withRecordInconsistency metadataObject $
modifyErr (\e -> "in function " <> qf <<> ": " <> e) do
rawfi <- handleMultipleFunctions qf funcDefs
(fi, deps) <- trackFunctionP2Setup tableNames qf systemDefined config rawfi
recordDependencies metadataObject schemaObject [deps]
pure fi
functionCache <- (mapFromL _cfFunction functions >- returnA)
>-> (| Inc.keyed (\_ (CatalogFunction qf systemDefined config funcDefs) -> do
let definition = toJSON $ TrackFunction qf
metadataObject = MetadataObject (MOFunction qf) definition
schemaObject = SOFunction qf
addFunctionContext e = "in function " <> qf <<> ": " <> e
(| withRecordInconsistency (
(| modifyErrA (do
rawfi <- bindErrorA -< handleMultipleFunctions qf funcDefs
(fi, dep) <- bindErrorA -<
trackFunctionP2Setup tableNames qf systemDefined config rawfi
recordDependencies -< (metadataObject, schemaObject, [dep])
returnA -< fi)
|) addFunctionContext)
|) metadataObject) |)
>-> (\infos -> M.catMaybes infos >- returnA)
-- allow list
let allowList = allowlistDefs
@ -223,15 +245,16 @@ buildSchemaCacheRule = proc inputs -> do
& HS.fromList
-- build GraphQL context with tables and functions
baseGQLSchema <- GS.mkGCtxMap tableCache functionCache
baseGQLSchema <- bindA -< GS.mkGCtxMap tableCache functionCache
-- remote schemas
let invalidatedRemoteSchemas = flip map remoteSchemas \remoteSchema ->
(M.lookup (_arsqName remoteSchema) invalidationMap, remoteSchema)
(remoteSchemaMap, gqlSchema, remoteGQLSchema) <-
foldM addRemoteSchema (M.empty, baseGQLSchema, GC.emptyGCtx) invalidatedRemoteSchemas
(| foldlA' (\schemas schema -> (schemas, schema) >- addRemoteSchema)
|) (M.empty, baseGQLSchema, GC.emptyGCtx) invalidatedRemoteSchemas
pure BuildOutputs
returnA -< BuildOutputs
{ _boTables = tableCache
, _boFunctions = functionCache
, _boRemoteSchemas = remoteSchemaMap
@ -260,201 +283,279 @@ buildSchemaCacheRule = proc inputs -> do
definition = object ["table" .= qt, "configuration" .= configuration]
in MetadataObject objectId definition
withRecordDependencies
:: (MonadWriter (Seq CollectedInfo) m)
=> MetadataObject -> SchemaObjId -> WriterT (Seq SchemaDependency) m a -> m a
withRecordDependencies metadataObject schemaObjectId m = do
(result, dependencies) <- runWriterT m
recordDependencies metadataObject schemaObjectId (toList dependencies)
pure result
bindErrorA
:: (ArrowChoice arr, ArrowKleisli m arr, ArrowError e arr, MonadError e m)
=> arr (m a) a
bindErrorA = (throwA ||| returnA) <<< arrM \m -> (Right <$> m) `catchError` (pure . Left)
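For illustration, a sketch of how `bindErrorA` gets used (editor's example assuming it were exported at top level; `checkPositive` and the String error type are hypothetical, not from this commit):

{-# LANGUAGE Arrows #-}
import Control.Arrow.Extended
import Control.Monad.Error.Class (MonadError, throwError)

-- editor's sketch: run a MonadError computation inside a proc block,
-- rerouting its failure through the arrow's own error channel
checkPositive
  :: (ArrowChoice arr, ArrowKleisli m arr, ArrowError String arr, MonadError String m)
  => arr Int Int
checkPositive = proc n ->
  bindErrorA -< if n < 0 then throwError "negative input" else pure n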
withTableContext :: (QErrM m) => QualifiedTable -> m a -> m a
withTableContext tableName = modifyErr \e -> "in table " <> tableName <<> ": " <> e
withRecordDependencies
:: (ArrowWriter (Seq CollectedInfo) arr)
=> WriterA (Seq SchemaDependency) arr (e, s) a
-> arr (e, (MetadataObject, (SchemaObjId, s))) a
withRecordDependencies f = proc (e, (metadataObject, (schemaObjectId, s))) -> do
(result, dependencies) <- runWriterA f -< (e, s)
recordDependencies -< (metadataObject, schemaObjectId, toList dependencies)
returnA -< result
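The `(| ... |)` pairs used above and below are GHC's banana brackets: inside a proc block, `(| e cmd1 ... cmdN |)` applies the ordinary expression `e` to commands, which is how combinators such as `withRecordInconsistency` wrap entire command blocks. A minimal, self-contained illustration (editor's sketch; nothing here is codebase-specific):

{-# LANGUAGE Arrows #-}
import Control.Arrow

-- (<+>) is a plain binary operator on arrows; banana brackets apply it to
-- two commands within proc notation
both :: (ArrowPlus arr) => arr Int Int
both = proc n -> (| (<+>) (returnA -< n + 1) (returnA -< n * 2) |)

-- runKleisli both 3 == [4, 6], using the ArrowPlus instance of Kleisli []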
noDuplicates
:: (ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr)
=> (a -> MetadataObject)
-> [a] `arr` Maybe a
noDuplicates mkMetadataObject = proc values -> case values of
[] -> returnA -< Nothing
[value] -> returnA -< Just value
value:_ -> do
let objectId = _moId $ mkMetadataObject value
definitions = map (_moDefinition . mkMetadataObject) values
tellA -< Seq.singleton $ CIInconsistency (DuplicateObjects objectId definitions)
returnA -< Nothing
addTableContext tableName e = "in table " <> tableName <<> ": " <> e
-- Given a map of table info, “folds in” another map of information, accumulating inconsistent
-- metadata objects for any entries in the second map that don't appear in the first map. This
-- is used to “line up” the metadata for relationships, computed fields, permissions, etc. with
-- the tracked table info.
alignExtraTableInfo
:: forall a b m
. (MonadWriter (Seq CollectedInfo) m)
:: forall a b arr
. (Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr)
=> (b -> MetadataObject)
-> M.HashMap QualifiedTable [b]
-> M.HashMap QualifiedTable a
-> m (M.HashMap QualifiedTable (a, [b]))
alignExtraTableInfo mkMetadataObject extraInfo baseInfo =
fmap M.catMaybes $ sequence $ alignWithKey combine baseInfo extraInfo
-> ( M.HashMap QualifiedTable a
, M.HashMap QualifiedTable [b]
) `arr` M.HashMap QualifiedTable (a, [b])
alignExtraTableInfo mkMetadataObject = proc (baseInfo, extraInfo) -> do
combinedInfo <-
(| Inc.keyed (\tableName infos -> combine -< (tableName, infos))
|) (align baseInfo extraInfo)
returnA -< M.catMaybes combinedInfo
where
combine :: QualifiedTable -> These a [b] -> m (Maybe (a, [b]))
combine tableName = \case
This base -> pure $ Just (base, [])
These base extras -> pure $ Just (base, extras)
combine :: (QualifiedTable, These a [b]) `arr` Maybe (a, [b])
combine = proc (tableName, infos) -> case infos of
This base -> returnA -< Just (base, [])
These base extras -> returnA -< Just (base, extras)
That extras -> do
let errorMessage = "table " <> tableName <<> " does not exist"
Nothing <$ traverse_ (flip recordInconsistency errorMessage . mkMetadataObject) extras
recordInconsistencies -< (map mkMetadataObject extras, errorMessage)
returnA -< Nothing
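`combine` consumes the output of `align`, which zips the two maps key by key into 'These' values; a standalone sketch of those semantics (editor's example, assuming the pre-1.0 `these` package, whose Data.Align module provides the HashMap instance):

import qualified Data.HashMap.Strict as M
import Data.Align (align)
import Data.These (These (..))

alignDemo :: M.HashMap String (These Int Char)
alignDemo = align (M.fromList [("a", 1), ("b", 2)])     -- base info
                  (M.fromList [("b", 'x'), ("c", 'y')]) -- extra info
-- up to hash ordering: fromList [("a", This 1), ("b", These 2 'x'), ("c", That 'y')]
-- so a That entry is extra info attached to a table that was never tracked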
addNonColumnFields
:: (QErrM m, MonadWriter (Seq CollectedInfo) m)
=> HashSet ForeignKey -- ^ all foreign keys
-> HashSet QualifiedTable -- ^ the names of all tracked tables
-> QualifiedTable
-> FieldInfoMap PGColumnInfo
-> [CatalogRelation]
-> [CatalogComputedField]
-> m (FieldInfoMap FieldInfo)
addNonColumnFields foreignKeys trackedTableNames tableName columns relationships computedFields = do
columnsAndRelationships <- foldM addRelationship (FIColumn <$> columns) relationships
foldM addComputedField columnsAndRelationships computedFields
:: ( Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr
, ArrowKleisli m arr, MonadError QErr m )
=> ( HashSet ForeignKey -- ^ all foreign keys
, HashSet QualifiedTable -- ^ the names of all tracked tables
, FieldInfoMap PGColumnInfo
, [CatalogRelation]
, [CatalogComputedField]
) `arr` FieldInfoMap FieldInfo
addNonColumnFields =
proc (foreignKeys, trackedTableNames, columns, relationships, computedFields) -> do
relationshipInfos <-
(| Inc.keyed (\_ relationshipsByName -> do
maybeRelationship <- noDuplicates mkRelationshipMetadataObject -< relationshipsByName
(\info -> join info >- returnA) <-<
(| traverseA (\relationship -> do
info <- buildRelationship -< (foreignKeys, relationship)
returnA -< info <&> (, mkRelationshipMetadataObject relationship))
|) maybeRelationship)
|) (M.groupOn _crRelName relationships)
computedFieldInfos <-
(| Inc.keyed (\_ computedFieldsByName -> do
maybeComputedField <- noDuplicates mkComputedFieldMetadataObject -< computedFieldsByName
(\info -> join info >- returnA) <-<
(| traverseA (\computedField -> do
info <- buildComputedField -< (trackedTableNames, computedField)
returnA -< info <&> (, mkComputedFieldMetadataObject computedField))
|) maybeComputedField)
|) (M.groupOn (_afcName . _cccComputedField) computedFields)
let mapKey f = M.fromList . map (first f) . M.toList
relationshipFields = mapKey fromRel $ M.catMaybes relationshipInfos
computedFieldFields = mapKey fromComputedField $ M.catMaybes computedFieldInfos
nonColumnFields <-
(| Inc.keyed (\fieldName fields -> noFieldConflicts -< (fieldName, fields))
|) (align relationshipFields computedFieldFields)
(| Inc.keyed (\_ fields -> noColumnConflicts -< fields)
|) (align columns (M.catMaybes nonColumnFields))
where
-- TODO: share code between buildRelationship and buildComputedField
addRelationship fields relationship@(CatalogRelation _ rn rt rDef _) = do
let metadataObject = mkRelationshipMetadataObject relationship
buildRelationship = proc (foreignKeys, relationship) -> do
let CatalogRelation tableName rn rt rDef _ = relationship
metadataObject = mkRelationshipMetadataObject relationship
schemaObject = SOTableObj tableName $ TORel rn
fmap (fromMaybe fields) $
withRecordInconsistency metadataObject $
withTableContext tableName $
modifyErr (\e -> "in relationship " <> rn <<> ": " <> e) do
(field, dependencies) <- case rt of
ObjRel -> do
using <- decodeValue rDef
let relDef = RelDef rn using Nothing
objRelP2Setup tableName foreignKeys relDef
ArrRel -> do
using <- decodeValue rDef
let relDef = RelDef rn using Nothing
arrRelP2Setup tableName foreignKeys relDef
recordDependencies metadataObject schemaObject dependencies
insertField fields (FIRelationship field)
addRelationshipContext e = "in relationship " <> rn <<> ": " <> e
(| withRecordInconsistency (
(| modifyErrA (do
(info, dependencies) <- bindErrorA -< case rt of
ObjRel -> do
using <- decodeValue rDef
objRelP2Setup tableName foreignKeys (RelDef rn using Nothing)
ArrRel -> do
using <- decodeValue rDef
arrRelP2Setup tableName foreignKeys (RelDef rn using Nothing)
recordDependencies -< (metadataObject, schemaObject, dependencies)
returnA -< info)
|) (addTableContext tableName . addRelationshipContext))
|) metadataObject
addComputedField fields computedField@(CatalogComputedField column funcDefs) = do
let AddComputedField qt name def comment = column
fmap (fromMaybe fields) $
withRecordInconsistency (mkComputedFieldMetadataObject computedField) $
withTableContext tableName $
modifyErr (\e -> "in computed field " <> name <<> ": " <> e) do
rawfi <- handleMultipleFunctions (_cfdFunction def) funcDefs
field <- addComputedFieldP2Setup trackedTableNames qt name def rawfi comment
insertField fields (FIComputedField field)
buildComputedField = proc (trackedTableNames, computedField) -> do
let CatalogComputedField column funcDefs = computedField
AddComputedField qt name def comment = column
addComputedFieldContext e = "in computed field " <> name <<> ": " <> e
(| withRecordInconsistency (
(| modifyErrA (do
rawfi <- bindErrorA -< handleMultipleFunctions (_cfdFunction def) funcDefs
bindErrorA -< addComputedFieldP2Setup trackedTableNames qt name def rawfi comment)
|) (addTableContext qt . addComputedFieldContext))
|) (mkComputedFieldMetadataObject computedField)
insertField
:: (QErrM m)
=> FieldInfoMap FieldInfo
-> FieldInfo
-> m (FieldInfoMap FieldInfo)
insertField fields fieldInfo = case M.lookup name fields of
Just existingFieldInfo -> throw400 AlreadyExists $
"conflicting definitions for field " <> name <<> ":\n"
<> " " <> showFieldType existingFieldInfo <> "\n " <> showFieldType fieldInfo
Nothing -> pure $ M.insert name fieldInfo fields
where
name = fieldInfoName fieldInfo
showFieldType = \case
FIColumn _ -> "postgres column"
FIRelationship _ -> "relationship"
FIComputedField _ -> "computed field"
noFieldConflicts = proc (fieldName, fields) -> case fields of
This (relationship, metadata) -> returnA -< Just (FIRelationship relationship, metadata)
That (computedField, metadata) -> returnA -< Just (FIComputedField computedField, metadata)
These (_, relationshipMetadata) (_, computedFieldMetadata) -> do
tellA -< Seq.singleton $ CIInconsistency $ ConflictingObjects
("conflicting definitions for field " <>> fieldName)
[relationshipMetadata, computedFieldMetadata]
returnA -< Nothing
noColumnConflicts = proc fields -> case fields of
This columnInfo -> returnA -< FIColumn columnInfo
That (fieldInfo, _) -> returnA -< fieldInfo
These columnInfo (_, fieldMetadata) -> do
recordInconsistency -< (fieldMetadata, "field definition conflicts with postgres column")
returnA -< FIColumn columnInfo
buildTablePermissions
:: (MonadTx m, MonadReader BuildReason m, MonadWriter (Seq CollectedInfo) m)
=> TableCoreCache
-> TableCoreInfo FieldInfo
-> [CatalogPermission]
-> m RolePermInfoMap
buildTablePermissions tableCache tableInfo permissions = flip runTableCoreCacheRT tableCache $
traverse (foldM buildAndInsertPermission emptyRolePermInfo) (M.groupOn _cpRole permissions)
where
tableName = _tciName tableInfo
:: ( Inc.ArrowCache arr, Inc.ArrowDistribute arr, ArrowKleisli m arr
, ArrowWriter (Seq CollectedInfo) arr, MonadTx m, MonadReader BuildReason m )
=> ( TableCoreCache
, TableCoreInfo FieldInfo
, [CatalogPermission]
) `arr` RolePermInfoMap
buildTablePermissions = proc (tableCache, tableInfo, tablePermissions) ->
(| Inc.keyed (\_ rolePermissions -> do
let (insertPerms, selectPerms, updatePerms, deletePerms) =
partitionPermissions rolePermissions
buildAndInsertPermission existingInfo permission@(CatalogPermission _ rn pt pDef _) = do
let metadataObject = mkPermissionMetadataObject permission
schemaObject = SOTableObj tableName $ TOPerm rn pt
fmap (fromMaybe existingInfo) $
withRecordInconsistency metadataObject $
withRecordDependencies metadataObject schemaObject $
withTableContext tableName $
modifyErr (\e -> "in permission for role " <> rn <<> ": " <> e) $
case pt of
PTInsert -> insertPermission rn existingInfo PAInsert =<< buildPermission rn pDef
PTSelect -> insertPermission rn existingInfo PASelect =<< buildPermission rn pDef
PTUpdate -> insertPermission rn existingInfo PAUpdate =<< buildPermission rn pDef
PTDelete -> insertPermission rn existingInfo PADelete =<< buildPermission rn pDef
insertPermInfo <- buildPermission -< (tableCache, tableInfo, insertPerms)
selectPermInfo <- buildPermission -< (tableCache, tableInfo, selectPerms)
updatePermInfo <- buildPermission -< (tableCache, tableInfo, updatePerms)
deletePermInfo <- buildPermission -< (tableCache, tableInfo, deletePerms)
returnA -< RolePermInfo
{ _permIns = insertPermInfo
, _permSel = selectPermInfo
, _permUpd = updatePermInfo
, _permDel = deletePermInfo
})
|) (M.groupOn _cpRole tablePermissions)
where
partitionPermissions = flip foldr ([], [], [], []) $
\perm (insertPerms, selectPerms, updatePerms, deletePerms) -> case _cpPermType perm of
PTInsert -> (perm:insertPerms, selectPerms, updatePerms, deletePerms)
PTSelect -> (insertPerms, perm:selectPerms, updatePerms, deletePerms)
PTUpdate -> (insertPerms, selectPerms, perm:updatePerms, deletePerms)
PTDelete -> (insertPerms, selectPerms, updatePerms, perm:deletePerms)
withPermission
:: (ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr)
=> WriterA (Seq SchemaDependency) (ErrorA QErr arr) (a, s) b
-> arr (a, (CatalogPermission, s)) (Maybe b)
withPermission f = proc (e, (permission, s)) -> do
let CatalogPermission tableName roleName permType _ _ = permission
metadataObject = mkPermissionMetadataObject permission
schemaObject = SOTableObj tableName $ TOPerm roleName permType
addPermContext err = "in permission for role " <> roleName <<> ": " <> err
(| withRecordInconsistency (
(| withRecordDependencies (
(| modifyErrA (f -< (e, s))
|) (addTableContext tableName . addPermContext))
|) metadataObject schemaObject)
|) metadataObject
buildPermission
:: ( MonadTx m
, MonadReader BuildReason m
, MonadWriter (Seq SchemaDependency) m
, TableCoreInfoRM m
, IsPerm a
, FromJSON a
)
=> RoleName -> Value -> m (PermInfo a)
buildPermission rn pDef = do
perm <- decodeValue pDef
let permDef = PermDef rn perm Nothing
(info, dependencies) <- buildPermInfo tableInfo permDef
:: ( Inc.ArrowCache arr, Inc.ArrowDistribute arr, ArrowKleisli m arr
, ArrowWriter (Seq CollectedInfo) arr, MonadTx m, MonadReader BuildReason m
, Eq a, IsPerm a, FromJSON a, Eq (PermInfo a) )
=> ( TableCoreCache
, TableCoreInfo FieldInfo
, [CatalogPermission]
) `arr` Maybe (PermInfo a)
buildPermission = proc (tableCache, tableInfo, permissions) ->
(permissions >- noDuplicates mkPermissionMetadataObject)
>-> (| traverseA (\permission@(CatalogPermission _ roleName _ pDef _) ->
(| withPermission (do
bindErrorA -< when (roleName == adminRole) $
throw400 ConstraintViolation "cannot define permission for admin role"
perm <- bindErrorA -< decodeValue pDef
let permDef = PermDef roleName perm Nothing
(info, dependencies) <- bindErrorA -<
runTableCoreCacheRT (buildPermInfo tableInfo permDef) tableCache
tellA -< Seq.fromList dependencies
rebuildViewsIfNeeded -< (_tciName tableInfo, permDef, info)
returnA -< info)
|) permission) |)
>-> (\info -> join info >- returnA)
rebuildViewsIfNeeded
:: ( Inc.ArrowCache arr, ArrowKleisli m arr, MonadTx m, MonadReader BuildReason m
, Eq a, IsPerm a, Eq (PermInfo a) )
=> (QualifiedTable, PermDef a, PermInfo a) `arr` ()
rebuildViewsIfNeeded = Inc.cache $ arrM \(tableName, permDef, info) -> do
buildReason <- ask
when (buildReason == CatalogUpdate) $
addPermP2Setup tableName permDef info
tell $ Seq.fromList dependencies
pure info
insertPermission
:: (QErrM m) => RoleName -> RolePermInfo -> PermAccessor a -> a -> m RolePermInfo
insertPermission roleName existingInfo accessor newInfo
| roleName == adminRole || has (permAccToLens accessor . _Just) existingInfo =
throw400 AlreadyExists $ "duplicate definition for " <> permissionType <> " permission"
| otherwise = pure (existingInfo & permAccToLens accessor ?~ newInfo)
where
permissionType = case accessor of
PAInsert -> "insert"
PASelect -> "select"
PAUpdate -> "update"
PADelete -> "delete"
buildTableEventTriggers
:: ( MonadIO m
, MonadTx m
, MonadReader BuildReason m
, MonadWriter (Seq CollectedInfo) m
, HasSQLGenCtx m
)
=> TableCoreCache
-> [CatalogEventTrigger]
-> m EventTriggerInfoMap
buildTableEventTriggers tableCache = flip runTableCoreCacheRT tableCache .
flip foldM M.empty \existingTriggers eventTrigger@(CatalogEventTrigger qt trn configuration) -> do
buildReason <- ask
let metadataObject = mkEventTriggerMetadataObject eventTrigger
schemaObjectId = SOTableObj qt $ TOTrigger trn
fmap (fromMaybe existingTriggers) $
withRecordInconsistency metadataObject $
withTableContext qt $
modifyErr (\e -> "in event trigger " <> trn <<> ": " <> e) do
case M.lookup trn existingTriggers of
Just _ -> throw400 AlreadyExists "duplicate definition for event trigger"
Nothing -> do
etc <- decodeValue configuration
(info, dependencies) <- subTableP2Setup qt etc
when (buildReason == CatalogUpdate) $
mkAllTriggersQ trn qt (etcDefinition etc)
recordDependencies metadataObject schemaObjectId dependencies
pure $ M.insert trn info existingTriggers
:: ( Inc.ArrowDistribute arr, ArrowKleisli m arr, ArrowWriter (Seq CollectedInfo) arr
, MonadIO m, MonadTx m, MonadReader BuildReason m
, HasSQLGenCtx m )
=> (TableCoreCache, [CatalogEventTrigger]) `arr` EventTriggerInfoMap
buildTableEventTriggers = proc (tableCache, eventTriggers) ->
(\infos -> M.catMaybes infos >- returnA) <-<
(| Inc.keyed (\_ duplicateEventTriggers -> do
maybeEventTrigger <- noDuplicates mkEventTriggerMetadataObject -< duplicateEventTriggers
(\info -> join info >- returnA) <-<
(| traverseA (\eventTrigger -> buildEventTrigger -< (tableCache, eventTrigger))
|) maybeEventTrigger)
|) (M.groupOn _cetName eventTriggers)
where
buildEventTrigger = proc (tableCache, eventTrigger) -> do
let CatalogEventTrigger qt trn configuration = eventTrigger
metadataObject = mkEventTriggerMetadataObject eventTrigger
schemaObjectId = SOTableObj qt $ TOTrigger trn
addTriggerContext e = "in event trigger " <> trn <<> ": " <> e
(| withRecordInconsistency (
(| modifyErrA (do
etc <- bindErrorA -< decodeValue configuration
(info, dependencies) <- bindErrorA -< subTableP2Setup qt etc
bindErrorA -< flip runTableCoreCacheRT tableCache $ do
buildReason <- ask
when (buildReason == CatalogUpdate) $
mkAllTriggersQ trn qt (etcDefinition etc)
recordDependencies -< (metadataObject, schemaObjectId, dependencies)
returnA -< info)
|) (addTableContext qt . addTriggerContext))
|) metadataObject
addRemoteSchema
:: (MonadIO m, QErrM m, MonadWriter (Seq CollectedInfo) m, HasHttpManager m)
=> (RemoteSchemaMap, GS.GCtxMap, GS.GCtx)
-> (Maybe InvalidationKey, AddRemoteSchemaQuery)
-> m (RemoteSchemaMap, GS.GCtxMap, GS.GCtx)
addRemoteSchema (remoteSchemas, gCtxMap, defGCtx) (_, rs@(AddRemoteSchemaQuery name _ _)) = do
fmap (fromMaybe (remoteSchemas, gCtxMap, defGCtx)) $
withRecordInconsistency (MetadataObject (MORemoteSchema name) (toJSON rs))
case M.lookup name remoteSchemas of
Just _ -> throw400 AlreadyExists "duplicate definition for remote schema"
Nothing -> do
rsCtx <- addRemoteSchemaP2Setup rs
let rGCtx = convRemoteGCtx $ rscGCtx rsCtx
mergedGCtxMap <- mergeRemoteSchema gCtxMap rGCtx
mergedDefGCtx <- mergeGCtx defGCtx rGCtx
pure (M.insert name rsCtx remoteSchemas, mergedGCtxMap, mergedDefGCtx)
:: ( ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr, ArrowKleisli m arr
, MonadIO m, HasHttpManager m )
=> ( (RemoteSchemaMap, GS.GCtxMap, GS.GCtx)
, (Maybe InvalidationKey, AddRemoteSchemaQuery)
) `arr` (RemoteSchemaMap, GS.GCtxMap, GS.GCtx)
addRemoteSchema = proc ((remoteSchemas, gCtxMap, defGCtx), (_, remoteSchema)) -> do
let name = _arsqName remoteSchema
(| onNothingA (returnA -< (remoteSchemas, gCtxMap, defGCtx)) |) <-<
(| withRecordInconsistency (case M.lookup name remoteSchemas of
Just _ -> throwA -< err400 AlreadyExists "duplicate definition for remote schema"
Nothing -> (throwA ||| returnA) <<< bindA -< runExceptT do
rsCtx <- addRemoteSchemaP2Setup remoteSchema
let rGCtx = convRemoteGCtx $ rscGCtx rsCtx
mergedGCtxMap <- mergeRemoteSchema gCtxMap rGCtx
mergedDefGCtx <- mergeGCtx defGCtx rGCtx
pure (M.insert name rsCtx remoteSchemas, mergedGCtxMap, mergedDefGCtx))
|) (MetadataObject (MORemoteSchema name) (toJSON remoteSchema))
-- | @'withMetadataCheck' cascade action@ runs @action@ and checks if the schema changed as a
-- result. If it did, it checks to ensure the changes do not violate any integrity constraints, and
@ -537,16 +638,17 @@ withMetadataCheck cascade action = do
where
SchemaDiff droppedTables alteredTables = schemaDiff
checkNewInconsistentMeta
:: (QErrM m)
=> [InconsistentMetadataObj] -> [InconsistentMetadataObj] -> m ()
checkNewInconsistentMeta originalInconsMeta currentInconsMeta =
unless (null newInconsMetaObjects) $
throwError (err500 Unexpected "cannot continue due to newly found inconsistent metadata")
{ qeInternal = Just $ toJSON newInconsMetaObjects }
where
newInconsMetaObjects =
M.elems $ M.differenceOn (_moId . _imoObject) currentInconsMeta originalInconsMeta
checkNewInconsistentMeta
:: (QErrM m)
=> [InconsistentMetadata] -> [InconsistentMetadata] -> m ()
checkNewInconsistentMeta originalInconsMeta currentInconsMeta =
unless (null newInconsistentObjects) $
throwError (err500 Unexpected "cannot continue due to newly found inconsistent metadata")
{ qeInternal = Just $ toJSON newInconsistentObjects }
where
diffInconsistentObjects = M.difference `on` groupInconsistentMetadataById
newInconsistentObjects = nub $ concatMap toList $
M.elems (currentInconsMeta `diffInconsistentObjects` originalInconsMeta)
purgeDependentObject :: (MonadTx m) => SchemaObjId -> m ()
purgeDependentObject = \case
@ -559,14 +661,13 @@ purgeDependentObject = \case
-- | Processes collected 'CIDependency' values into a 'DepMap', performing integrity checking to
-- ensure the dependencies actually exist. If a dependency is missing, its transitive dependents are
-- removed from the cache, and 'InconsistentMetadataObj's are returned.
-- removed from the cache, and 'InconsistentMetadata's are returned.
resolveDependencies
:: forall m. (QErrM m)
=> Inc.Rule m
( BuildOutputs
, [(MetadataObject, SchemaObjId, SchemaDependency)]
) (BuildOutputs, [InconsistentMetadataObj], DepMap)
resolveDependencies = Inc.rule \(cache, dependencies) -> do
:: forall arr m. (ArrowKleisli m arr, QErrM m)
=> ( BuildOutputs
, [(MetadataObject, SchemaObjId, SchemaDependency)]
) `arr` (BuildOutputs, [InconsistentMetadata], DepMap)
resolveDependencies = arrM \(cache, dependencies) -> do
let dependencyMap = dependencies
& M.groupOn (view _2)
& fmap (map \(metadataObject, _, schemaDependency) -> (metadataObject, schemaDependency))
@ -576,7 +677,7 @@ resolveDependencies = Inc.rule \(cache, dependencies) -> do
--
-- 1. First, pruneDanglingDependents searches for any dependencies that do not exist in the
-- current cache and removes their dependents from the dependency map, returning an
-- InconsistentMetadataObj for each dependent that was removed. This step does not change
-- InconsistentMetadata for each dependent that was removed. This step does not change
-- the schema cache in any way.
--
-- 2. Second, deleteMetadataObject drops the pruned dependent objects from the cache. It does
@ -589,15 +690,15 @@ resolveDependencies = Inc.rule \(cache, dependencies) -> do
performIteration
:: Int
-> BuildOutputs
-> [InconsistentMetadataObj]
-> [InconsistentMetadata]
-> HashMap SchemaObjId [(MetadataObject, SchemaDependency)]
-> m (BuildOutputs, [InconsistentMetadataObj], DepMap)
-> m (BuildOutputs, [InconsistentMetadata], DepMap)
performIteration iterationNumber cache inconsistencies dependencies = do
let (newInconsistencies, prunedDependencies) = pruneDanglingDependents cache dependencies
case newInconsistencies of
[] -> pure (cache, inconsistencies, HS.fromList . map snd <$> prunedDependencies)
_ | iterationNumber < 100 -> do
let inconsistentIds = _moId . _imoObject <$> newInconsistencies
let inconsistentIds = nub $ concatMap imObjectIds newInconsistencies
prunedCache = foldl' (flip deleteMetadataObject) cache inconsistentIds
allInconsistencies = inconsistencies <> newInconsistencies
performIteration (iterationNumber + 1) prunedCache allInconsistencies prunedDependencies
@ -615,11 +716,11 @@ resolveDependencies = Inc.rule \(cache, dependencies) -> do
pruneDanglingDependents
:: BuildOutputs
-> HashMap SchemaObjId [(MetadataObject, SchemaDependency)]
-> ([InconsistentMetadataObj], HashMap SchemaObjId [(MetadataObject, SchemaDependency)])
-> ([InconsistentMetadata], HashMap SchemaObjId [(MetadataObject, SchemaDependency)])
pruneDanglingDependents cache = fmap (M.filter (not . null)) . traverse do
partitionEithers . map \(metadataObject, dependency) -> case resolveDependency dependency of
Right () -> Right (metadataObject, dependency)
Left errorMessage -> Left (InconsistentMetadataObj metadataObject errorMessage)
Left errorMessage -> Left (InconsistentObject errorMessage metadataObject)
where
resolveDependency :: SchemaDependency -> Either Text ()
resolveDependency (SchemaDependency objectId _) = case objectId of
@ -674,3 +775,39 @@ resolveDependencies = Inc.rule \(cache, dependencies) -> do
MTOPerm roleName permType -> withPermType permType \accessor ->
tiRolePermInfoMap.ix roleName.permAccToLens accessor .~ Nothing
MTOTrigger name -> tiEventTriggerInfoMap %~ M.delete name
{- Note [Specialization of buildRebuildableSchemaCache]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As mentioned in Note [Arrow rewrite rules] in Control.Arrow.Extended and Note [Rule rewrite rules]
in Hasura.Incremental, it is very important that `buildRebuildableSchemaCache` be specialized to
ensure the relevant rules fire. This is a bit subtle, as GHC will only specialize non-class methods
across modules under the following conditions:
(1) The definition is marked INLINABLE.
(2) The use site is not overloaded; i.e. all typeclass constraints are satisfied.
This means that even if we mark `buildRebuildableSchemaCache` INLINABLE, GHC still won't be able to
specialize it unless its immediate use site has a concrete type. If we were to have some polymorphic
function
foo :: (MonadFoo m) => m Bar
foo = do { ...; cache <- buildRebuildableSchemaCache; ... }
then GHC would not be able to specialize `buildRebuildableSchemaCache` unless `foo` is also
specialized, since that's the only way it is able to know which type to specialize it at!
Fortunately, this cross-module specialization is transitive, so as long as we mark `foo` INLINABLE
as well, then when `foo` is specialized, `buildRebuildableSchemaCache` is also specialized. The only
downside to this approach is that it means the eventual top-level caller that instantiates the
constraints ends up having to specialize an enormous amount of code all at once, which tends to
bring compile times to a crawl (and may even run out of memory).
A better solution, where possible, is to insert explicit SPECIALIZE pragmas to encourage GHC to do
the specialization early. For example, we could write
{-# SPECIALIZE foo :: FooM Bar #-}
alongside the definition of `foo`, and GHC will immediately produce a specialized version of `foo`
on `FooM`. If a caller then uses `foo` in `FooM`, it will use the specialized version.
I regret this being necessary, but I don't see a way around it. -}
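For illustration, a self-contained toy version of the pragma dance described above (editor's sketch with hypothetical names; the real targets in this codebase are `buildRebuildableSchemaCache` and its callers):

{-# LANGUAGE FlexibleContexts #-}
import Control.Monad.Reader

-- INLINABLE keeps the unfolding available across module boundaries...
{-# INLINABLE doubleEnv #-}
-- ...and SPECIALIZE eagerly generates a copy at a concrete monad
{-# SPECIALIZE doubleEnv :: Reader Int Int #-}
doubleEnv :: (MonadReader Int m) => m Int
doubleEnv = asks (* 2)

main :: IO ()
main = print (runReader doubleEnv 21) -- resolves to the Reader-specialized copy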
@ -11,19 +11,32 @@ module Hasura.RQL.DDL.Schema.Catalog
import Hasura.Prelude
import qualified Data.Text as T
import qualified Database.PG.Query as Q
-- import Data.Time.Clock
import Data.Aeson
import Hasura.Db
import Hasura.RQL.Types.Catalog
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.SchemaCache
import Hasura.SQL.Types
fetchCatalogData :: (MonadTx m) => m CatalogMetadata
fetchCatalogData = liftTx $ Q.getAltJ . runIdentity . Q.getRow <$> Q.withQE defaultTxErrorHandler
$(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True
fetchCatalogData = do
-- startTime <- liftTx $ liftIO getCurrentTime
metadataBytes <- (liftTx $ runIdentity . Q.getRow <$> Q.withQE defaultTxErrorHandler $(Q.sqlFromFile "src-rsr/catalog_metadata.sql") () True)
-- afterFetchTime <- liftTx $ liftIO getCurrentTime
-- liftTx $ liftIO $ putStrLn $
-- "----> [fetch/query] " <> show (afterFetchTime `diffUTCTime` startTime)
let !decodedValue = force (eitherDecodeStrict' metadataBytes)
-- afterDecodeTime <- liftTx $ liftIO getCurrentTime
-- liftTx $ liftIO $ putStrLn $
-- "----> [fetch/decode] " <> show (afterDecodeTime `diffUTCTime` afterFetchTime)
decodedValue `onLeft` \err -> throw500 (T.pack err)
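The bang pattern plus `force` evaluates the entire decoded structure before anything else runs, so the decoding cost is paid (and measurable) up front rather than lazily; the same pattern in isolation (editor's sketch with generic types):

{-# LANGUAGE BangPatterns #-}
import Control.DeepSeq (NFData, force)
import Data.Aeson (FromJSON, eitherDecodeStrict')
import qualified Data.ByteString as B

-- parse strictly: the Either and everything beneath it reach normal form here
decodeStrictly :: (FromJSON a, NFData a) => B.ByteString -> Either String a
decodeStrictly bytes =
  let !decoded = force (eitherDecodeStrict' bytes)
  in decoded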
saveTableToCatalog :: (MonadTx m) => QualifiedTable -> SystemDefined -> Bool -> TableConfig -> m ()
saveTableToCatalog (QualifiedObject sn tn) systemDefined isEnum config = liftTx $
@ -59,9 +59,9 @@ $(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''ConstraintMeta)
data FunctionMeta
= FunctionMeta
{ fmOid :: !Int
, fmFunction :: !QualifiedFunction
, fmType :: !FunctionType
{ fmOid :: !Int
, fmFunction :: !QualifiedFunction
, fmType :: !FunctionType
} deriving (Show, Eq)
$(deriveJSON (aesonDrop 2 snakeCase) ''FunctionMeta)
@ -21,7 +21,7 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified Control.Monad.Validate as MV
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import qualified Data.HashSet as S
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import qualified Database.PG.Query as Q
@ -39,7 +39,8 @@ data RawFunctionInfo
, rfiDefaultArgs :: !Int
, rfiReturnsTable :: !Bool
, rfiDescription :: !(Maybe PGDescription)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData RawFunctionInfo
$(deriveJSON (aesonDrop 3 snakeCase) ''RawFunctionInfo)
mkFunctionArgs :: Int -> [QualifiedPGType] -> [FunctionArgName] -> [FunctionArg]
@ -181,7 +182,8 @@ newtype TrackFunction
data FunctionConfig
= FunctionConfig
{ _fcSessionArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Lift, Generic)
} deriving (Show, Eq, Generic, Lift)
instance NFData FunctionConfig
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields = True} ''FunctionConfig)
emptyFunctionConfig :: FunctionConfig
@ -1,3 +1,5 @@
{-# LANGUAGE Arrows #-}
-- | Description: Create/delete SQL tables to/from Hasura metadata.
module Hasura.RQL.DDL.Schema.Table
( TrackTable(..)
@ -36,8 +38,10 @@ import Hasura.SQL.Types
import qualified Database.PG.Query as Q
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Schema as GS
import qualified Hasura.Incremental as Inc
import qualified Language.GraphQL.Draft.Syntax as G
import Control.Arrow.Extended
import Control.Lens.Extended hiding ((.=))
import Data.Aeson
import Data.Aeson.Casing
@ -46,7 +50,7 @@ import Instances.TH.Lift ()
import Language.Haskell.TH.Syntax (Lift)
import Network.URI.Extended ()
import qualified Data.HashMap.Strict as M
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.Text as T
data TrackTable
@ -287,44 +291,57 @@ delTableAndDirectDeps qtn@(QualifiedObject sn tn) = do
-- '_tiRolePermInfoMap' or '_tiEventTriggerInfoMap' at all, and '_tiFieldInfoMap' only contains
-- columns, not relationships; those pieces of information are filled in by later stages.
buildTableCache
:: forall m. (MonadTx m, CacheBuildM m)
=> [CatalogTable] -> m (M.HashMap QualifiedTable (TableCoreInfo PGColumnInfo))
buildTableCache = processTableCache <=< buildRawTableCache
:: forall arr m
. (Inc.ArrowDistribute arr, ArrowWriter (Seq CollectedInfo) arr, ArrowKleisli m arr, MonadTx m)
=> [CatalogTable] `arr` M.HashMap QualifiedTable (TableCoreInfo PGColumnInfo)
buildTableCache = proc catalogTables -> do
rawTableInfos <-
(| Inc.keyed (| withTable (\tables -> buildRawTableInfo <<< noDuplicates -< tables) |)
|) (M.groupOnNE _ctName catalogTables)
let rawTableCache = M.catMaybes rawTableInfos
enumTables = M.mapMaybe _tciEnumValues rawTableCache
tableInfos <-
(| Inc.keyed (| withTable (\table -> processTableInfo -< (enumTables, table)) |)
|) rawTableCache
returnA -< M.catMaybes tableInfos
where
withTable name = withRecordInconsistency $ MetadataObject (MOTable name) (toJSON name)
withTable :: ErrorA QErr arr (e, s) a -> arr (e, (QualifiedTable, s)) (Maybe a)
withTable f = withRecordInconsistency f <<<
second (first $ arr \name -> MetadataObject (MOTable name) (toJSON name))
noDuplicates = proc tables -> case tables of
table :| [] -> returnA -< table
_ -> throwA -< err400 AlreadyExists "duplicate definition for table"
-- Step 1: Build the raw table cache from metadata information.
buildRawTableCache
:: [CatalogTable]
-> m (M.HashMap QualifiedTable (TableCoreInfo PGRawColumnInfo))
buildRawTableCache catalogTables = fmap (M.fromList . catMaybes) . for catalogTables $
\(CatalogTable name systemDefined isEnum config maybeInfo) -> withTable name $ do
catalogInfo <- onNothing maybeInfo $
throw400 NotExists $ "no such table/view exists in postgres: " <>> name
buildRawTableInfo :: ErrorA QErr arr CatalogTable (TableCoreInfo PGRawColumnInfo)
buildRawTableInfo = proc (CatalogTable name systemDefined isEnum config maybeInfo) -> do
catalogInfo <-
(| onNothingA (throwA -<
err400 NotExists $ "no such table/view exists in postgres: " <>> name)
|) maybeInfo
let CatalogTableInfo columns constraints primaryKeyColumnNames viewInfo maybeDesc = catalogInfo
primaryKeyColumns = flip filter columns $ \column ->
prciName column `elem` primaryKeyColumnNames
fetchEnumValues = fetchAndValidateEnumValues name primaryKeyColumns columns
maybeEnumValues <- if isEnum then Just <$> fetchEnumValues else pure Nothing
let info = TableCoreInfo
{ _tciName = name
, _tciSystemDefined = systemDefined
, _tciFieldInfoMap = mapFromL (fromPGCol . prciName) columns
, _tciUniqueOrPrimaryKeyConstraints = constraints
, _tciPrimaryKeyColumns = primaryKeyColumnNames
, _tciViewInfo = viewInfo
, _tciEnumValues = maybeEnumValues
, _tciCustomConfig = config
, _tciDescription = maybeDesc
}
maybeEnumValues <- if isEnum
then bindA -< Just <$> fetchAndValidateEnumValues name primaryKeyColumns columns
else returnA -< Nothing
-- validate tableConfig
-- FIXME
-- withPathK "configuration" $ validateTableConfig info config
pure (name, info)
returnA -< TableCoreInfo
{ _tciName = name
, _tciSystemDefined = systemDefined
, _tciFieldInfoMap = mapFromL (fromPGCol . prciName) columns
, _tciUniqueOrPrimaryKeyConstraints = constraints
, _tciPrimaryKeyColumns = primaryKeyColumnNames
, _tciViewInfo = viewInfo
, _tciEnumValues = maybeEnumValues
, _tciCustomConfig = config
, _tciDescription = maybeDesc
}
-- validateTableConfig :: TableCoreInfo a -> TableConfig -> m ()
-- validateTableConfig tableInfo (TableConfig rootFlds colFlds) = do
@ -344,71 +361,52 @@ buildTableCache = processTableCache <=< buildRawTableCache
-- Step 2: Process the raw table cache to replace Postgres column types with logical column
-- types.
processTableCache
:: M.HashMap QualifiedTable (TableCoreInfo PGRawColumnInfo)
-> m (M.HashMap QualifiedTable (TableCoreInfo PGColumnInfo))
processTableCache rawTables = fmap (M.mapMaybe id) . for rawTables $ \rawInfo -> do
processTableInfo
:: ErrorA QErr arr
( M.HashMap QualifiedTable EnumValues
, TableCoreInfo PGRawColumnInfo
) (TableCoreInfo PGColumnInfo)
processTableInfo = (throwA ||| returnA) <<< arr \(enumTables, rawInfo) -> runExcept do
let tableName = _tciName rawInfo
customFields = _tcCustomColumnNames $ _tciCustomConfig rawInfo
withTable tableName $ rawInfo
& tciFieldInfoMap.traverse %%~ processColumnInfo enumTables customFields tableName
process = processColumnInfo enumTables customFields tableName
traverseOf (tciFieldInfoMap.traverse) process rawInfo
-- | “Processes” a 'PGRawColumnInfo' into a 'PGColumnInfo' by resolving its type using a map of
-- known enum tables.
processColumnInfo
:: (QErrM n)
=> M.HashMap QualifiedTable EnumValues -- ^ known enum tables
-> CustomColumnNames -- ^ customised graphql names
-> QualifiedTable -- ^ the table this column belongs to
-> PGRawColumnInfo -- ^ the column's raw information
-> n PGColumnInfo
processColumnInfo enumTables customFields tableName rawInfo = do
resolvedType <- resolveColumnType
pure PGColumnInfo
{ pgiColumn = pgCol
, pgiName = graphqlName
, pgiType = resolvedType
, pgiIsNullable = prciIsNullable rawInfo
, pgiDescription = prciDescription rawInfo
}
where
enumTables = M.mapMaybe _tciEnumValues rawTables
pgCol = prciName rawInfo
graphqlName = fromMaybe (G.Name $ getPGColTxt pgCol) $
M.lookup pgCol customFields
resolveColumnType =
case prciReferences rawInfo of
-- no referenced tables? definitely not an enum
[] -> pure $ PGColumnScalar (prciType rawInfo)
-- FIXME
-- validateWithExistingColumns :: FieldInfoMap PGRawColumnInfo -> CustomColumnNames -> m ()
-- validateWithExistingColumns columnFields customColumnNames = do
-- withPathK "custom_column_names" $ do
-- -- Check all keys are valid columns
-- forM_ (M.keys customColumnNames) $ \col -> void $ askPGColInfo columnFields col ""
-- let columns = getCols columnFields
-- defaultNameMap = M.fromList $ flip map columns $
-- \col -> ( prciName col
-- , G.Name $ getPGColTxt $ prciName col
-- )
-- customNames = M.elems $ defaultNameMap `M.union` customColumnNames
-- conflictingCustomNames = duplicates customNames
--
-- when (not $ null conflictingCustomNames) $ throw400 NotSupported $
-- "the following custom column names are conflicting: " <> showNames conflictingCustomNames
-- one referenced table? might be an enum, so check if the referenced table is an enum
[referencedTableName] -> pure $ M.lookup referencedTableName enumTables & maybe
(PGColumnScalar $ prciType rawInfo)
(PGColumnEnumReference . EnumReference referencedTableName)
-- | “Processes” a 'PGRawColumnInfo' into a 'PGColumnInfo' by resolving its type using a map of known
-- enum tables.
processColumnInfo
:: (QErrM m)
=> M.HashMap QualifiedTable EnumValues -- ^ known enum tables
-> CustomColumnNames -- ^ customised graphql names
-> QualifiedTable -- ^ the table this column belongs to
-> PGRawColumnInfo -- ^ the column's raw information
-> m PGColumnInfo
processColumnInfo enumTables customFields tableName rawInfo = do
resolvedType <- resolveColumnType
pure PGColumnInfo
{ pgiColumn = pgCol
, pgiName = graphqlName
, pgiType = resolvedType
, pgiIsNullable = prciIsNullable rawInfo
, pgiDescription = prciDescription rawInfo
}
where
pgCol = prciName rawInfo
graphqlName = fromMaybe (G.Name $ getPGColTxt pgCol) $
M.lookup pgCol customFields
resolveColumnType =
case prciReferences rawInfo of
-- no referenced tables? definitely not an enum
[] -> pure $ PGColumnScalar (prciType rawInfo)
-- one referenced table? might be an enum, so check if the referenced table is an enum
[referencedTableName] -> pure $ M.lookup referencedTableName enumTables & maybe
(PGColumnScalar $ prciType rawInfo)
(PGColumnEnumReference . EnumReference referencedTableName)
-- multiple referenced tables? we could check if any of them are enums, but the schema is
-- strange, so let's just reject it
referencedTables -> throw400 ConstraintViolation
$ "cannot handle exotic schema: column " <> prciName rawInfo <<> " in table "
<> tableName <<> " references multiple foreign tables ("
<> T.intercalate ", " (map dquote referencedTables) <> ")?"
-- multiple referenced tables? we could check if any of them are enums, but the schema
-- is strange, so let's just reject it
referencedTables -> throw400 ConstraintViolation
$ "cannot handle exotic schema: column " <> prciName rawInfo <<> " in table "
<> tableName <<> " references multiple foreign tables ("
<> T.intercalate ", " (map dquote referencedTables) <> ")?"
@ -4,11 +4,41 @@ module Hasura.RQL.Instances where
import Hasura.Prelude
import Instances.TH.Lift ()
import qualified Language.Haskell.TH.Syntax as TH
import Instances.TH.Lift ()
import qualified Language.Haskell.TH.Syntax as TH
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import qualified Language.GraphQL.Draft.Syntax as G
instance NFData G.Argument
instance NFData G.Directive
instance NFData G.ExecutableDefinition
instance NFData G.Field
instance NFData G.FragmentDefinition
instance NFData G.FragmentSpread
instance NFData G.GType
instance NFData G.InlineFragment
instance NFData G.OperationDefinition
instance NFData G.OperationType
instance NFData G.Selection
instance NFData G.TypedOperationDefinition
instance NFData G.Value
instance NFData G.ValueConst
instance NFData G.VariableDefinition
instance (NFData a) => NFData (G.ObjectFieldG a)
deriving instance NFData G.Alias
deriving instance NFData G.EnumValue
deriving instance NFData G.ExecutableDocument
deriving instance NFData G.ListType
deriving instance NFData G.Name
deriving instance NFData G.NamedType
deriving instance NFData G.Nullability
deriving instance NFData G.StringValue
deriving instance NFData G.Variable
deriving instance (NFData a) => NFData (G.ListValueG a)
deriving instance (NFData a) => NFData (G.ObjectValueG a)
instance (TH.Lift k, TH.Lift v) => TH.Lift (M.HashMap k v) where
lift m = [| M.fromList $(TH.lift $ M.toList m) |]
@ -43,26 +43,26 @@ import Hasura.EncJSON
import Hasura.Prelude
import Hasura.SQL.Types
import Hasura.Db as R
import Hasura.RQL.Types.BoolExp as R
import Hasura.RQL.Types.Column as R
import Hasura.RQL.Types.Common as R
import Hasura.RQL.Types.ComputedField as R
import Hasura.RQL.Types.DML as R
import Hasura.RQL.Types.Error as R
import Hasura.RQL.Types.EventTrigger as R
import Hasura.RQL.Types.Function as R
import Hasura.RQL.Types.Metadata as R
import Hasura.RQL.Types.Permission as R
import Hasura.RQL.Types.RemoteSchema as R
import Hasura.RQL.Types.SchemaCache as R
import Hasura.Db as R
import Hasura.RQL.Types.BoolExp as R
import Hasura.RQL.Types.Column as R
import Hasura.RQL.Types.Common as R
import Hasura.RQL.Types.ComputedField as R
import Hasura.RQL.Types.DML as R
import Hasura.RQL.Types.Error as R
import Hasura.RQL.Types.EventTrigger as R
import Hasura.RQL.Types.Function as R
import Hasura.RQL.Types.Metadata as R
import Hasura.RQL.Types.Permission as R
import Hasura.RQL.Types.RemoteSchema as R
import Hasura.RQL.Types.SchemaCache as R
import Hasura.RQL.Types.SchemaCache.Build as R
import qualified Hasura.GraphQL.Context as GC
import qualified Hasura.GraphQL.Context as GC
import qualified Data.HashMap.Strict as M
import qualified Data.Text as T
import qualified Network.HTTP.Client as HTTP
import qualified Data.HashMap.Strict as M
import qualified Data.Text as T
import qualified Network.HTTP.Client as HTTP
data QCtx
= QCtx
@ -124,6 +124,8 @@ askEventTriggerInfo trn = do
class (Monad m) => HasHttpManager m where
askHttpManager :: m HTTP.Manager
instance (HasHttpManager m) => HasHttpManager (ExceptT e m) where
askHttpManager = lift askHttpManager
instance (HasHttpManager m) => HasHttpManager (ReaderT r m) where
askHttpManager = lift askHttpManager
instance (HasHttpManager m) => HasHttpManager (StateT s m) where
@ -37,7 +37,8 @@ data CatalogTableInfo
, _ctiPrimaryKeyColumns :: ![PGCol]
, _ctiViewInfo :: !(Maybe ViewInfo)
, _ctiDescription :: !(Maybe PGDescription)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogTableInfo
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogTableInfo)
data CatalogTable
@ -47,7 +48,8 @@ data CatalogTable
, _ctIsEnum :: !Bool
, _ctConfiguration :: !TableConfig
, _ctInfo :: !(Maybe CatalogTableInfo)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogTable
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogTable)
data CatalogRelation
@ -57,7 +59,8 @@ data CatalogRelation
, _crRelType :: !RelType
, _crDef :: !Value
, _crComment :: !(Maybe Text)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogRelation
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogRelation)
data CatalogPermission
@ -67,14 +70,16 @@ data CatalogPermission
, _cpPermType :: !PermType
, _cpDef :: !Value
, _cpComment :: !(Maybe Text)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogPermission
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogPermission)
data CatalogComputedField
= CatalogComputedField
{ _cccComputedField :: !AddComputedField
, _cccFunctionInfo :: ![RawFunctionInfo] -- ^ multiple functions with same name
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogComputedField
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogComputedField)
data CatalogEventTrigger
@ -82,7 +87,8 @@ data CatalogEventTrigger
{ _cetTable :: !QualifiedTable
, _cetName :: !TriggerName
, _cetDef :: !Value
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogEventTrigger
$(deriveJSON (aesonDrop 4 snakeCase) ''CatalogEventTrigger)
data CatalogFunction
@ -91,7 +97,8 @@ data CatalogFunction
, _cfIsSystemDefined :: !SystemDefined
, _cfConfiguration :: !FunctionConfig
, _cfInfo :: ![RawFunctionInfo] -- ^ multiple functions with same name
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogFunction
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogFunction)
data CatalogMetadata
@ -105,5 +112,6 @@ data CatalogMetadata
, _cmForeignKeys :: ![ForeignKey]
, _cmAllowlistCollections :: ![CollectionDef]
, _cmComputedFields :: ![CatalogComputedField]
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData CatalogMetadata
$(deriveJSON (aesonDrop 3 snakeCase) ''CatalogMetadata)
@ -132,7 +132,8 @@ data PGRawColumnInfo
-- ^ only stores single-column references to primary key of foreign tables (used for detecting
-- references to enum tables)
, prciDescription :: !(Maybe PGDescription)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData PGRawColumnInfo
$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColumnInfo)
-- | “Resolved” column info, produced from a 'PGRawColumnInfo' value that has been combined with other
@ -45,7 +45,7 @@ import qualified PostgreSQL.Binary.Decoding as PD
import qualified Test.QuickCheck as QC
newtype NonEmptyText = NonEmptyText {unNonEmptyText :: T.Text}
deriving (Show, Eq, Ord, Hashable, ToJSON, ToJSONKey, Lift, Q.ToPrepArg, DQuote, Generic)
deriving (Show, Eq, Ord, Hashable, ToJSON, ToJSONKey, Lift, Q.ToPrepArg, DQuote, Generic, NFData)
instance Arbitrary NonEmptyText where
arbitrary = NonEmptyText . T.pack <$> QC.listOf1 (QC.elements alphaNumerics)
@ -76,8 +76,8 @@ rootText :: NonEmptyText
rootText = NonEmptyText "root"
newtype RelName
= RelName {getRelTxt :: NonEmptyText}
deriving (Show, Eq, Hashable, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Lift, Generic, Arbitrary)
= RelName { getRelTxt :: NonEmptyText }
deriving (Show, Eq, Hashable, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Lift, Generic, Arbitrary, NFData)
instance IsIden RelName where
toIden rn = Iden $ relNameToTxt rn
@ -99,7 +99,7 @@ data RelType
= ObjRel
| ArrRel
deriving (Show, Eq, Generic)
instance NFData RelType
instance Hashable RelType
instance ToJSON RelType where
@ -129,7 +129,7 @@ $(deriveToJSON (aesonDrop 2 snakeCase) ''RelInfo)
newtype FieldName
= FieldName { getFieldNameTxt :: T.Text }
deriving (Show, Eq, Ord, Hashable, FromJSON, ToJSON, FromJSONKey, ToJSONKey, Lift, Data, Generic, Arbitrary)
deriving (Show, Eq, Ord, Hashable, FromJSON, ToJSON, FromJSONKey, ToJSONKey, Lift, Data, Generic, Arbitrary, NFData)
instance IsIden FieldName where
toIden (FieldName f) = Iden f
@ -181,6 +181,7 @@ data ForeignKey
, _fkConstraint :: !ConstraintName
, _fkColumnMapping :: !ColMapping
} deriving (Show, Eq, Generic)
instance NFData ForeignKey
$(deriveJSON (aesonDrop 3 snakeCase) ''ForeignKey)
instance Hashable ForeignKey
@ -188,7 +189,7 @@ instance Hashable ForeignKey
type CustomColumnNames = HM.HashMap PGCol G.Name
newtype SystemDefined = SystemDefined { unSystemDefined :: Bool }
deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg)
deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg, NFData)
isSystemDefined :: SystemDefined -> Bool
isSystemDefined = unSystemDefined
@ -21,7 +21,7 @@ import qualified Database.PG.Query as Q
newtype ComputedFieldName =
ComputedFieldName { unComputedFieldName :: NonEmptyText}
deriving (Show, Eq, Lift, FromJSON, ToJSON, Q.ToPrepArg, DQuote, Hashable, Q.FromCol, Generic, Arbitrary)
deriving (Show, Eq, NFData, Lift, FromJSON, ToJSON, Q.ToPrepArg, DQuote, Hashable, Q.FromCol, Generic, Arbitrary)
computedFieldNameToText :: ComputedFieldName -> Text
computedFieldNameToText = unNonEmptyText . unComputedFieldName
@ -1,3 +1,5 @@
{-# LANGUAGE Arrows #-}
module Hasura.RQL.Types.Error
( Code(..)
, QErr(..)
@ -25,6 +27,7 @@ module Hasura.RQL.Types.Error
, modifyErr
, modifyErrAndSet500
, modifyQErr
, modifyErrA
-- Attach context
, withPathK
@ -36,15 +39,16 @@ module Hasura.RQL.Types.Error
, indexedMapM_
) where
import Control.Arrow.Extended
import Data.Aeson
import Data.Aeson.Internal
import Data.Aeson.Types
import qualified Database.PG.Query as Q
import qualified Database.PG.Query as Q
import Hasura.Prelude
import Text.Show (Show (..))
import Text.Show (Show (..))
import qualified Data.Text as T
import qualified Network.HTTP.Types as N
import qualified Data.Text as T
import qualified Network.HTTP.Types as N
data Code
= PermissionDenied
@ -238,6 +242,9 @@ modifyErr :: (QErrM m)
-> m a -> m a
modifyErr f = modifyQErr (liftTxtMod f)
modifyErrA :: (ArrowError QErr arr) => arr (e, s) a -> arr (e, (Text -> Text, s)) a
modifyErrA f = proc (e, (g, s)) -> (| mapErrorA (f -< (e, s)) |) (liftTxtMod g)
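A usage sketch with a hypothetical command name: inside a proc block, `(| modifyErrA (validateWidget -< (env, input)) |) ("in widget: " <>)` runs the command and prepends that context to the message of any QErr it raises.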
liftTxtMod :: (T.Text -> T.Text) -> QErr -> QErr
liftTxtMod f (QErr path st s c i) = QErr path st (f s) c i
@ -38,7 +38,7 @@ import qualified Database.PG.Query as Q
import qualified Text.Regex.TDFA as TDFA
newtype TriggerName = TriggerName { unTriggerName :: NonEmptyText }
deriving (Show, Eq, Hashable, Lift, DQuote, FromJSON, ToJSON, ToJSONKey, Q.FromCol, Q.ToPrepArg, Generic, Arbitrary)
deriving (Show, Eq, Hashable, Lift, DQuote, FromJSON, ToJSON, ToJSONKey, Q.FromCol, Q.ToPrepArg, Generic, Arbitrary, NFData)
triggerNameToTxt :: TriggerName -> Text
triggerNameToTxt = unNonEmptyText . unTriggerName
@ -17,8 +17,8 @@ data FunctionType
= FTVOLATILE
| FTIMMUTABLE
| FTSTABLE
deriving (Eq)
deriving (Eq, Generic)
instance NFData FunctionType
$(deriveJSON defaultOptions{constructorTagModifier = drop 2} ''FunctionType)
funcTypToTxt :: FunctionType -> T.Text
@ -31,7 +31,7 @@ instance Show FunctionType where
newtype FunctionArgName =
FunctionArgName { getFuncArgNameTxt :: T.Text}
deriving (Show, Eq, ToJSON, FromJSON, Lift, DQuote, IsString, Generic, Arbitrary)
deriving (Show, Eq, NFData, ToJSON, FromJSON, Lift, DQuote, IsString, Generic, Arbitrary)
newtype HasDefault = HasDefault { unHasDefault :: Bool }
deriving (Show, Eq, ToJSON)

module Hasura.RQL.Types.Metadata where
import qualified Data.HashMap.Strict.Extended as M
import Data.Aeson
import Hasura.Prelude
import qualified Data.Text as T
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.ComputedField
import Hasura.RQL.Types.EventTrigger
@ -28,29 +28,73 @@ data MetadataObjId
deriving (Show, Eq, Generic)
instance Hashable MetadataObjId
moiTypeName :: MetadataObjId -> Text
moiTypeName = \case
MOTable _ -> "table"
MOFunction _ -> "function"
MORemoteSchema _ -> "remote_schema"
MOTableObj _ tableObjectId -> case tableObjectId of
MTORel _ relType -> relTypeToTxt relType <> "_relation"
MTOPerm _ permType -> permTypeToCode permType <> "_permission"
MTOTrigger _ -> "event_trigger"
MTOComputedField _ -> "computed_field"
moiName :: MetadataObjId -> Text
moiName objectId = moiTypeName objectId <> " " <> case objectId of
MOTable name -> dquoteTxt name
MOFunction name -> dquoteTxt name
MORemoteSchema name -> dquoteTxt name
MOTableObj tableName tableObjectId ->
let tableObjectName = case tableObjectId of
MTORel name _ -> dquoteTxt name
MTOComputedField name -> dquoteTxt name
MTOPerm name _ -> dquoteTxt name
MTOTrigger name -> dquoteTxt name
in tableObjectName <> " in " <> moiName (MOTable tableName)
data MetadataObject
= MetadataObject
{ _moId :: !MetadataObjId
{ _moId :: !MetadataObjId
, _moDefinition :: !Value
} deriving (Show, Eq)
data InconsistentMetadataObj
= InconsistentMetadataObj
{ _imoObject :: !MetadataObject
, _imoReason :: !T.Text
} deriving (Show, Eq)
data InconsistentMetadata
= InconsistentObject !Text !MetadataObject
| ConflictingObjects !Text ![MetadataObject]
| DuplicateObjects !MetadataObjId ![Value]
deriving (Show, Eq)
instance ToJSON InconsistentMetadataObj where
toJSON (InconsistentMetadataObj (MetadataObject objectId info) rsn) = object
[ "type" .= String case objectId of
MOTable _ -> "table"
MOFunction _ -> "function"
MORemoteSchema _ -> "remote_schema"
MOTableObj _ tableObjectId -> case tableObjectId of
MTORel _ relType -> relTypeToTxt relType <> "_relation"
MTOPerm _ permType -> permTypeToCode permType <> "_permission"
MTOTrigger _ -> "event_trigger"
MTOComputedField _ -> "computed_field"
, "definition" .= info
, "reason" .= rsn
]
imObjectIds :: InconsistentMetadata -> [MetadataObjId]
imObjectIds = \case
InconsistentObject _ metadata -> [_moId metadata]
ConflictingObjects _ metadatas -> map _moId metadatas
DuplicateObjects objectId _ -> [objectId]
imReason :: InconsistentMetadata -> Text
imReason = \case
InconsistentObject reason _ -> reason
ConflictingObjects reason _ -> reason
DuplicateObjects objectId _ -> "multiple definitions for " <> moiName objectId
-- | Builds a map from each unique metadata object id to the inconsistencies associated with it.
-- Note that a single inconsistency can involve multiple metadata objects, so the same inconsistency
-- may appear in the resulting map multiple times!
groupInconsistentMetadataById
:: [InconsistentMetadata] -> HashMap MetadataObjId (NonEmpty InconsistentMetadata)
groupInconsistentMetadataById = M.fromListWith (<>) . concatMap \metadata ->
map (, metadata :| []) (imObjectIds metadata)
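The `fromListWith (<>)` step is the standard multimap-building trick; in isolation (editor's sketch with generic key/value types):

import qualified Data.HashMap.Strict as M
import Data.List.NonEmpty (NonEmpty (..))

groupByFst :: [(String, Int)] -> M.HashMap String (NonEmpty Int)
groupByFst = M.fromListWith (<>) . map (\(k, v) -> (k, v :| []))

-- groupByFst [("a", 1), ("a", 2), ("b", 3)]
--   == M.fromList [("a", 2 :| [1]), ("b", 3 :| [])]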
instance ToJSON InconsistentMetadata where
toJSON inconsistentMetadata = object (("reason" .= imReason inconsistentMetadata) : extraFields)
where
extraFields = case inconsistentMetadata of
InconsistentObject _ metadata -> metadataObjectFields metadata
ConflictingObjects _ metadatas ->
[ "objects" .= map (object . metadataObjectFields) metadatas ]
DuplicateObjects objectId definitions ->
[ "type" .= String (moiTypeName objectId)
, "definitions" .= definitions ]
metadataObjectFields (MetadataObject objectId definition) =
[ "type" .= String (moiTypeName objectId)
, "definition" .= definition ]
@ -45,7 +45,7 @@ import qualified PostgreSQL.Binary.Decoding as PD
newtype RoleName
= RoleName {getRoleTxt :: NonEmptyText}
deriving ( Show, Eq, Ord, Hashable, FromJSONKey, ToJSONKey, FromJSON
, ToJSON, Q.FromCol, Q.ToPrepArg, Lift, Generic, Arbitrary)
, ToJSON, Q.FromCol, Q.ToPrepArg, Lift, Generic, Arbitrary, NFData )
instance DQuote RoleName where
dquoteTxt = roleNameToTxt
@ -121,7 +121,8 @@ data PermType
| PTSelect
| PTUpdate
| PTDelete
deriving (Eq, Lift)
deriving (Eq, Lift, Generic)
instance NFData PermType
instance Q.FromCol PermType where
fromCol bs = flip Q.fromColHelper bs $ PD.enum $ \case
@ -24,15 +24,15 @@ newtype CollectionName
newtype QueryName
= QueryName {unQueryName :: NonEmptyText}
deriving (Show, Eq, Ord, Hashable, Lift, ToJSON, ToJSONKey, FromJSON, DQuote, Generic, Arbitrary)
deriving (Show, Eq, Ord, NFData, Hashable, Lift, ToJSON, ToJSONKey, FromJSON, DQuote, Generic, Arbitrary)
newtype GQLQuery
= GQLQuery {unGQLQuery :: G.ExecutableDocument}
deriving (Show, Eq, Hashable, Lift, ToJSON, FromJSON)
deriving (Show, Eq, NFData, Hashable, Lift, ToJSON, FromJSON)
newtype GQLQueryWithText
= GQLQueryWithText (T.Text, GQLQuery)
deriving (Show, Eq, Lift, Generic)
deriving (Show, Eq, NFData, Lift, Generic)
instance FromJSON GQLQueryWithText where
parseJSON v@(String t) = GQLQueryWithText . (t, ) <$> parseJSON v
@ -54,6 +54,7 @@ data ListedQuery
{ _lqName :: !QueryName
, _lqQuery :: !GQLQueryWithText
} deriving (Show, Eq, Lift, Generic)
instance NFData ListedQuery
$(deriveJSON (aesonDrop 3 snakeCase) ''ListedQuery)
type QueryList = [ListedQuery]
@ -61,7 +62,7 @@ type QueryList = [ListedQuery]
newtype CollectionDef
= CollectionDef
{ _cdQueries :: QueryList }
deriving (Show, Eq, Lift, Generic)
deriving (Show, Eq, Lift, Generic, NFData)
$(deriveJSON (aesonDrop 3 snakeCase) ''CollectionDef)
data CreateCollection

View File

@ -22,7 +22,7 @@ newtype RemoteSchemaName
= RemoteSchemaName
{ unRemoteSchemaName :: NonEmptyText }
deriving ( Show, Eq, Lift, Hashable, J.ToJSON, J.ToJSONKey
, J.FromJSON, Q.ToPrepArg, Q.FromCol, DQuote
, J.FromJSON, Q.ToPrepArg, Q.FromCol, DQuote, NFData
, Generic, Arbitrary
)
@ -46,6 +46,7 @@ data RemoteSchemaDef
, _rsdForwardClientHeaders :: !Bool
, _rsdTimeoutSeconds :: !(Maybe Int)
} deriving (Show, Eq, Lift, Generic)
instance NFData RemoteSchemaDef
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''RemoteSchemaDef)
instance J.FromJSON RemoteSchemaDef where
@ -63,6 +64,7 @@ data AddRemoteSchemaQuery
, _arsqDefinition :: !RemoteSchemaDef
, _arsqComment :: !(Maybe Text)
} deriving (Show, Eq, Lift, Generic)
instance NFData AddRemoteSchemaQuery
$(J.deriveJSON (J.aesonDrop 5 J.snakeCase) ''AddRemoteSchemaQuery)
newtype RemoteSchemaNameQuery

View File

@ -2,8 +2,8 @@
-- warning, so don't treat it as an error even if -Werror is enabled.
{-# OPTIONS_GHC -Wwarn=redundant-constraints #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE UndecidableInstances #-}
module Hasura.RQL.Types.SchemaCache
@ -112,6 +112,7 @@ module Hasura.RQL.Types.SchemaCache
import qualified Hasura.GraphQL.Context as GC
import Hasura.Db
import Hasura.Prelude
import Hasura.RQL.Types.BoolExp
import Hasura.RQL.Types.Column
@ -126,7 +127,6 @@ import Hasura.RQL.Types.QueryCollection
import Hasura.RQL.Types.RemoteSchema
import Hasura.RQL.Types.SchemaCacheTypes
import Hasura.SQL.Types
import Hasura.Db
import Control.Lens
import Data.Aeson
@ -281,7 +281,8 @@ data ConstraintType
| CTFOREIGNKEY
| CTPRIMARYKEY
| CTUNIQUE
deriving Eq
deriving (Eq, Generic)
instance NFData ConstraintType
constraintTyToTxt :: ConstraintType -> T.Text
constraintTyToTxt ty = case ty of
@ -319,7 +320,8 @@ data TableConstraint
= TableConstraint
{ tcType :: !ConstraintType
, tcName :: !ConstraintName
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData TableConstraint
$(deriveJSON (aesonDrop 2 snakeCase) ''TableConstraint)
@ -328,8 +330,8 @@ data ViewInfo
{ viIsUpdatable :: !Bool
, viIsDeletable :: !Bool
, viIsInsertable :: !Bool
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData ViewInfo
$(deriveJSON (aesonDrop 2 snakeCase) ''ViewInfo)
isMutable :: (ViewInfo -> Bool) -> Maybe ViewInfo -> Bool
@ -348,6 +350,7 @@ data TableConfig
{ _tcCustomRootFields :: !GC.TableCustomRootFields
, _tcCustomColumnNames :: !CustomColumnNames
} deriving (Show, Eq, Lift, Generic)
instance NFData TableConfig
$(deriveToJSON (aesonDrop 3 snakeCase) ''TableConfig)
emptyTableConfig :: TableConfig
@ -362,24 +365,24 @@ instance FromJSON TableConfig where
data TableCoreInfo fieldInfo
= TableCoreInfo
{ _tciName :: !QualifiedTable
, _tciDescription :: !(Maybe PGDescription)
, _tciSystemDefined :: !SystemDefined
, _tciFieldInfoMap :: !(FieldInfoMap fieldInfo)
, _tciUniqueOrPrimaryKeyConstraints :: ![ConstraintName]
, _tciPrimaryKeyColumns :: ![PGCol]
, _tciViewInfo :: !(Maybe ViewInfo)
, _tciEnumValues :: !(Maybe EnumValues)
, _tciCustomConfig :: !TableConfig
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 4 snakeCase) ''TableCoreInfo)
$(makeLenses ''TableCoreInfo)
data TableInfo
= TableInfo
{ _tiCoreInfo :: TableCoreInfo FieldInfo
, _tiRolePermInfoMap :: !RolePermInfoMap
, _tiEventTriggerInfoMap :: !EventTriggerInfoMap
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 3 snakeCase) ''TableInfo)
$(makeLenses ''TableInfo)
@ -436,7 +439,7 @@ data SchemaCache
, scGCtxMap :: !GC.GCtxMap
, scDefaultRemoteGCtx :: !GC.GCtx
, scDepMap :: !DepMap
, scInconsistentObjs :: ![InconsistentMetadataObj]
, scInconsistentObjs :: ![InconsistentMetadata]
} deriving (Show, Eq)
$(deriveToJSON (aesonDrop 2 snakeCase) ''SchemaCache)

View File

@ -1,11 +1,13 @@
{-# LANGUAGE Arrows #-}
-- | Types and functions used in the process of building the schema cache from metadata information
-- stored in the @hdb_catalog@ schema in Postgres.
module Hasura.RQL.Types.SchemaCache.Build
( CollectedInfo(..)
, partitionCollectedInfo
, CacheBuildM
, recordInconsistency
, recordInconsistencies
, recordDependencies
, withRecordInconsistency
@ -17,23 +19,26 @@ module Hasura.RQL.Types.SchemaCache.Build
, withNewInconsistentObjsCheck
) where
import Hasura.Prelude
import qualified Data.HashMap.Strict.Extended as M
import qualified Data.Sequence as Seq
import qualified Data.Text as T
import Control.Arrow.Extended
import Data.Aeson (toJSON)
import Data.List (nub)
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Metadata
import Hasura.RQL.Types.RemoteSchema (RemoteSchemaName)
import Hasura.RQL.Types.SchemaCache
-- ----------------------------------------------------------------------------
-- types used during schema cache construction
data CollectedInfo
= CIInconsistency !InconsistentMetadataObj
= CIInconsistency !InconsistentMetadata
| CIDependency
!MetadataObject -- ^ for error reporting on missing dependencies
!SchemaObjId
@ -42,7 +47,7 @@ data CollectedInfo
partitionCollectedInfo
:: Seq CollectedInfo
-> ([InconsistentMetadataObj], [(MetadataObject, SchemaObjId, SchemaDependency)])
-> ([InconsistentMetadata], [(MetadataObject, SchemaObjId, SchemaDependency)])
partitionCollectedInfo =
flip foldr ([], []) \info (inconsistencies, dependencies) -> case info of
CIInconsistency inconsistency -> (inconsistency:inconsistencies, dependencies)
@ -50,20 +55,30 @@ partitionCollectedInfo =
let dependency = (metadataObject, objectId, schemaDependency)
in (inconsistencies, dependency:dependencies)
type CacheBuildM = MonadWriter (Seq CollectedInfo)
recordInconsistency :: (ArrowWriter (Seq CollectedInfo) arr) => (MetadataObject, Text) `arr` ()
recordInconsistency = first (arr (:[])) >>> recordInconsistencies
recordInconsistency :: (CacheBuildM m) => MetadataObject -> Text -> m ()
recordInconsistency metadataObject reason =
tell $ Seq.singleton $ CIInconsistency (InconsistentMetadataObj metadataObject reason)
recordInconsistencies :: (ArrowWriter (Seq CollectedInfo) arr) => ([MetadataObject], Text) `arr` ()
recordInconsistencies = proc (metadataObjects, reason) ->
tellA -< Seq.fromList $ map (CIInconsistency . InconsistentObject reason) metadataObjects
recordDependencies :: (CacheBuildM m) => MetadataObject -> SchemaObjId -> [SchemaDependency] -> m ()
recordDependencies metadataObject schemaObjectId =
tell . Seq.fromList . fmap (CIDependency metadataObject schemaObjectId)
recordDependencies
:: (ArrowWriter (Seq CollectedInfo) arr)
=> (MetadataObject, SchemaObjId, [SchemaDependency]) `arr` ()
recordDependencies = proc (metadataObject, schemaObjectId, dependencies) ->
tellA -< Seq.fromList $ map (CIDependency metadataObject schemaObjectId) dependencies
withRecordInconsistency :: (QErrM m, CacheBuildM m) => MetadataObject -> m a -> m (Maybe a)
withRecordInconsistency metadataObject m = (Just <$> m) `catchError` \err -> do
let inconsistentObject = InconsistentMetadataObj metadataObject (qeError err)
Nothing <$ tell (Seq.singleton $ CIInconsistency inconsistentObject)
withRecordInconsistency
:: (ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr)
=> ErrorA QErr arr (e, s) a
-> arr (e, (MetadataObject, s)) (Maybe a)
withRecordInconsistency f = proc (e, (metadataObject, s)) -> do
result <- runErrorA f -< (e, s)
case result of
Left err -> do
recordInconsistency -< (metadataObject, qeError err)
returnA -< Nothing
Right v -> returnA -< Just v
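-- Intended usage looks roughly like the sketch below ('exampleUse' and 'step'
-- are assumed names for illustration, not definitions from this commit): the
-- fallible step runs inside 'ErrorA', and any 'QErr' it raises is recorded as
-- an inconsistency blamed on the given 'MetadataObject' instead of aborting
-- the whole build.
exampleUse
  :: (ArrowChoice arr, ArrowWriter (Seq CollectedInfo) arr)
  => MetadataObject             -- ^ the object to blame if the step fails
  -> ErrorA QErr arr ((), s) a  -- ^ the fallible build step (no extra environment)
  -> arr s (Maybe a)
exampleUse metadataObject step = proc s ->
  withRecordInconsistency step -< ((), (metadataObject, s))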
-- ----------------------------------------------------------------------------
-- operations for triggering a schema cache rebuild
@ -98,15 +113,16 @@ buildSchemaCacheFor objectId = do
buildSchemaCache
newSchemaCache <- askSchemaCache
let diffInconsistentObjects = M.differenceOn (_moId . _imoObject) `on` scInconsistentObjs
let diffInconsistentObjects = M.difference `on` (groupInconsistentMetadataById . scInconsistentObjs)
newInconsistentObjects = newSchemaCache `diffInconsistentObjects` oldSchemaCache
for_ (M.lookup objectId newInconsistentObjects) $ \matchingObject ->
throw400 ConstraintViolation (_imoReason matchingObject)
for_ (M.lookup objectId newInconsistentObjects) $ \matchingObjects -> do
let reasons = T.intercalate ", " $ map imReason $ toList matchingObjects
throwError (err400 ConstraintViolation reasons) { qeInternal = Just $ toJSON matchingObjects }
unless (null newInconsistentObjects) $
throwError (err400 Unexpected "cannot continue due to new inconsistent metadata")
{ qeInternal = Just $ toJSON (M.elems newInconsistentObjects) }
{ qeInternal = Just $ toJSON (nub . concatMap toList $ M.elems newInconsistentObjects) }
-- | Like 'buildSchemaCache', but fails if there is any inconsistent metadata.
buildSchemaCacheStrict :: (QErrM m, CacheRWM m) => m ()
@ -118,7 +134,7 @@ buildSchemaCacheStrict = do
let err = err400 Unexpected "cannot continue due to inconsistent metadata"
throwError err{ qeInternal = Just $ toJSON inconsObjs }
-- | Executes the given action, and if any new 'InconsistentMetadataObj's are added to the schema
-- | Executes the given action, and if any new 'InconsistentMetadata's are added to the schema
-- cache as a result of its execution, raises an error.
withNewInconsistentObjsCheck :: (QErrM m, CacheRM m) => m a -> m a
withNewInconsistentObjsCheck action = do
@ -126,8 +142,9 @@ withNewInconsistentObjsCheck action = do
result <- action
currentObjects <- scInconsistentObjs <$> askSchemaCache
let newInconsistentObjects =
M.elems $ M.differenceOn (_moId . _imoObject) currentObjects originalObjects
let diffInconsistentObjects = M.difference `on` groupInconsistentMetadataById
newInconsistentObjects =
nub $ concatMap toList $ M.elems (currentObjects `diffInconsistentObjects` originalObjects)
unless (null newInconsistentObjects) $
throwError (err500 Unexpected "cannot continue due to newly found inconsistent metadata")
{ qeInternal = Just $ toJSON newInconsistentObjects }

View File

@ -157,7 +157,7 @@ class ToTxt a where
newtype TableName
= TableName { getTableTxt :: T.Text }
deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary)
deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary, NFData)
instance IsIden TableName where
toIden (TableName t) = Iden t
@ -201,7 +201,7 @@ isView _ = False
newtype ConstraintName
= ConstraintName { getConstraintTxt :: T.Text }
deriving (Show, Eq, DQuote, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift)
deriving (Show, Eq, DQuote, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift, NFData)
instance IsIden ConstraintName where
toIden (ConstraintName t) = Iden t
@ -211,7 +211,7 @@ instance ToSQL ConstraintName where
newtype FunctionName
= FunctionName { getFunctionTxt :: T.Text }
deriving (Show, Eq, Ord, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift, Data, Generic, Arbitrary)
deriving (Show, Eq, Ord, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift, Data, Generic, Arbitrary, NFData)
instance IsIden FunctionName where
toIden (FunctionName t) = Iden t
@ -227,7 +227,7 @@ instance ToTxt FunctionName where
newtype SchemaName
= SchemaName { getSchemaTxt :: T.Text }
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary)
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary, NFData)
publicSchema :: SchemaName
publicSchema = SchemaName "public"
@ -246,6 +246,7 @@ data QualifiedObject a
{ qSchema :: !SchemaName
, qName :: !a
} deriving (Show, Eq, Functor, Ord, Generic, Lift, Data)
instance (NFData a) => NFData (QualifiedObject a)
instance (FromJSON a) => FromJSON (QualifiedObject a) where
parseJSON v@(String _) =
@ -295,11 +296,11 @@ type QualifiedFunction = QualifiedObject FunctionName
newtype PGDescription
= PGDescription { getPGDescription :: T.Text }
deriving (Show, Eq, FromJSON, ToJSON, Q.FromCol)
deriving (Show, Eq, FromJSON, ToJSON, Q.FromCol, NFData)
newtype PGCol
= PGCol { getPGColTxt :: T.Text }
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, ToJSONKey, FromJSONKey, Lift, Data, Generic, Arbitrary)
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, ToJSONKey, FromJSONKey, Lift, Data, Generic, Arbitrary, NFData)
instance IsIden PGCol where
toIden (PGCol t) = Iden t
@ -337,6 +338,7 @@ data PGScalarType
| PGRaster
| PGUnknown !T.Text
deriving (Show, Eq, Lift, Generic, Data)
instance NFData PGScalarType
instance Hashable PGScalarType
@ -534,7 +536,8 @@ data PGTypeKind
| PGKindRange
| PGKindPseudo
| PGKindUnknown !T.Text
deriving (Show, Eq)
deriving (Show, Eq, Generic)
instance NFData PGTypeKind
instance FromJSON PGTypeKind where
parseJSON = withText "postgresTypeKind" $
@ -562,7 +565,8 @@ data QualifiedPGType
{ _qptSchema :: !SchemaName
, _qptName :: !PGScalarType
, _qptType :: !PGTypeKind
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance NFData QualifiedPGType
$(deriveJSON (aesonDrop 4 snakeCase) ''QualifiedPGType)
isBaseType :: QualifiedPGType -> Bool

View File

@ -115,7 +115,7 @@ isAdminSecretSet _ = boolToText True
getSCFromRef :: (MonadIO m) => SchemaCacheRef -> m SchemaCache
getSCFromRef scRef = lastBuiltSchemaCache . fst <$> liftIO (readIORef $ _scrCache scRef)
logInconsObjs :: L.Logger L.Hasura -> [InconsistentMetadataObj] -> IO ()
logInconsObjs :: L.Logger L.Hasura -> [InconsistentMetadata] -> IO ()
logInconsObjs logger objs =
unless (null objs) $ L.unLogger logger $ mkInconsMetadataLog objs

View File

@ -91,7 +91,7 @@ instance ToEngineLog MetadataLog Hasura where
toEngineLog ml =
(mlLogLevel ml, ELTInternal ILTMetadata, toJSON ml)
mkInconsMetadataLog :: [InconsistentMetadataObj] -> MetadataLog
mkInconsMetadataLog :: [InconsistentMetadata] -> MetadataLog
mkInconsMetadataLog objs =
MetadataLog LevelWarn "Inconsistent Metadata!" $
object [ "objects" .= objs]

View File

@ -16,8 +16,8 @@ module Hasura.Server.Migrate
, dropCatalog
) where
import Control.Monad.Unique
import Data.Time.Clock (UTCTime)
import Hasura.Prelude
@ -29,15 +29,14 @@ import qualified Database.PG.Query.Connection as Q
import qualified Language.Haskell.TH.Lib as TH
import qualified Language.Haskell.TH.Syntax as TH
import Hasura.Logging (Hasura, LogLevel (..),
ToEngineLog (..))
import Hasura.Logging (Hasura, LogLevel (..), ToEngineLog (..))
import Hasura.RQL.DDL.Schema
import Hasura.RQL.Types
import Hasura.Server.Logging (StartupLog (..))
import Hasura.Server.Migrate.Version (latestCatalogVersion,
latestCatalogVersionString)
import Hasura.Server.Query
import Hasura.SQL.Types
dropCatalog :: (MonadTx m) => m ()
dropCatalog = liftTx $ Q.catchE defaultTxErrorHandler $ do
@ -66,6 +65,9 @@ instance ToEngineLog MigrationResult Hasura where
<> latestCatalogVersionString <> "."
}
-- see Note [Specialization of buildRebuildableSchemaCache]
{-# SPECIALIZE migrateCatalog :: UTCTime -> Run (MigrationResult, RebuildableSchemaCache Run) #-}
migrateCatalog
:: forall m
. ( MonadIO m

View File

@ -1,16 +1,16 @@
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE UndecidableInstances #-}
module Hasura.Server.Query where
import Control.Lens
import Control.Monad.Trans.Control (MonadBaseControl (..))
import Control.Monad.Unique
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Time (UTCTime)
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
@ -179,6 +179,9 @@ instance HasHttpManager Run where
instance HasSQLGenCtx Run where
askSQLGenCtx = asks _rcSqlGenCtx
-- see Note [Specialization of buildRebuildableSchemaCache]
{-# SPECIALIZE buildRebuildableSchemaCache :: Run (RebuildableSchemaCache Run) #-}
fetchLastUpdate :: Q.TxE QErr (Maybe (InstanceId, UTCTime))
fetchLastUpdate = do
Q.withQE defaultTxErrorHandler

View File

@ -211,7 +211,7 @@ refreshSchemaCache sqlGenCtx pool logger httpManager cacheRef threadType msg = d
& runCacheRWT rebuildableCache
& peelRun runCtx pgCtx PG.ReadWrite
case resE of
Left e -> logError logger threadType $ TEQueryError e
Right () -> logInfo logger threadType $ object ["message" .= msg]
where
runCtx = RunCtx adminUserInfo httpManager sqlGenCtx

View File

@ -1,4 +1,4 @@
{-# OPTIONS_GHC -fforce-recomp #-}
-- {-# OPTIONS_GHC -fforce-recomp #-}
module Hasura.Server.Version
( currentVersion
, consoleVersion

View File

@ -4,12 +4,13 @@ module Hasura.IncrementalSpec (spec) where
import Hasura.Prelude
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import Control.Arrow.Extended
import Test.Hspec
import qualified Hasura.Incremental as Inc
spec :: Spec
spec = do
@ -19,8 +20,8 @@ spec = do
add1 = modify' (+1)
rule = proc (a, b) -> do
Inc.cache $ Inc.rule (\_ -> add1) -< a
Inc.cache $ Inc.rule (\_ -> add1 *> add1) -< b
Inc.cache $ arrM (\_ -> add1) -< a
Inc.cache $ arrM (\_ -> add1 *> add1) -< b
let (result1, state1) = runState (Inc.build rule (False, False)) 0
state1 `shouldBe` 3
@ -33,8 +34,11 @@ spec = do
it "preserves incrementalization when entries dont change" $ do
let rule :: MonadWriter (S.HashSet (String, Integer)) m
=> Inc.Rule m (M.HashMap String Integer) (M.HashMap String Integer)
rule = Inc.keyed . Inc.cache . Inc.rule $ \(k, v) ->
tell (S.singleton (k, v)) $> (v * 2)
rule = proc m ->
(| Inc.keyed (\k v -> do
Inc.cache $ arrM (tell . S.singleton) -< (k, v)
returnA -< v * 2)
|) m
let (result1, log1) = runWriter . Inc.build rule $ M.fromList [("a", 1), ("b", 2)]
Inc.result result1 `shouldBe` M.fromList [("a", 2), ("b", 4)]

View File

@ -4,20 +4,21 @@ module Hasura.Server.MigrateSpec (CacheRefT(..), spec) where
import Hasura.Prelude
import Control.Concurrent.MVar.Lifted
import Control.Monad.Trans.Control (MonadBaseControl)
import Control.Monad.Unique
import Control.Natural ((:~>) (..))
import Data.Time.Clock (getCurrentTime)
import Data.Tuple (swap)
import Test.Hspec.Core.Spec
import Test.Hspec.Expectations.Lifted
import qualified Database.PG.Query as Q
import Hasura.RQL.DDL.Metadata (ClearMetadata (..), runClearMetadata)
import Hasura.RQL.DDL.Schema
import Hasura.RQL.Types
import Hasura.Server.Migrate
import Hasura.Server.PGDump

View File

@ -2,32 +2,31 @@ module Main (main) where
import Hasura.Prelude
import Control.Concurrent.MVar
import Control.Natural ((:~>) (..))
import Data.Time.Clock (getCurrentTime)
import Options.Applicative
import System.Environment (getEnvironment)
import System.Exit (exitFailure)
import Test.Hspec
import qualified Data.Aeson as A
import qualified Data.ByteString.Lazy.Char8 as BL
import qualified Database.PG.Query as Q
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Client.TLS as HTTP
import qualified Test.Hspec.Runner as Hspec
import Hasura.Db (PGExecCtx (..))
import Hasura.RQL.Types (SQLGenCtx (..), adminUserInfo)
import Hasura.Server.Init (RawConnInfo, mkConnInfo, mkRawConnInfo,
parseRawConnInfo, runWithEnv)
import Hasura.Server.Migrate
import Hasura.Server.Query (Run, RunCtx (..), peelRun)
import qualified Hasura.IncrementalSpec as IncrementalSpec
import qualified Hasura.RQL.MetadataSpec as MetadataSpec
import qualified Hasura.Server.MigrateSpec as MigrateSpec
import qualified Hasura.IncrementalSpec as IncrementalSpec
import qualified Hasura.RQL.MetadataSpec as MetadataSpec
import qualified Hasura.Server.MigrateSpec as MigrateSpec
data TestSuites
= AllSuites !RawConnInfo
@ -43,7 +42,7 @@ main = parseArgs >>= \case
postgresSpecs <- buildPostgresSpecs pgConnOptions
runHspec (unitSpecs *> postgresSpecs)
SingleSuite suite -> case suite of
UnitSuite -> runHspec unitSpecs
PostgresSuite pgConnOptions -> runHspec =<< buildPostgresSpecs pgConnOptions
unitSpecs :: Spec

View File

@ -9,7 +9,7 @@
definition:
function: full_name
response:
path: "$.args.table"
path: $.args.table
error: table "random" does not exist
code: not-exists
@ -24,13 +24,19 @@
definition:
function: full_name
response:
internal:
- definition:
definition:
function: full_name
table_argument:
name: first_name
comment:
table: author
reason: field definition conflicts with postgres column
type: computed_field
path: $.args
error: |-
in table "author": in computed field "first_name": conflicting definitions for field "first_name":
postgres column
computed field
error: field definition conflicts with postgres column
code: constraint-violation
- description: Try adding computed field with invalid function
url: /v1/query
status: 400
@ -42,12 +48,21 @@
definition:
function: random_function
response:
internal:
- definition:
definition:
function: random_function
table_argument:
name: full_name
comment:
table: author
reason: 'in table "author": in computed field "full_name": no such function
exists in postgres : "random_function"'
type: computed_field
path: $.args
error: >-
in table "author": in computed field "full_name":
no such function exists in postgres : "random_function"
error: 'in table "author": in computed field "full_name": no such function exists
in postgres : "random_function"'
code: constraint-violation
- description: Try adding computed field with invalid table argument name
url: /v1/query
status: 400
@ -60,12 +75,23 @@
function: full_name
table_argument: random
response:
internal:
- definition:
definition:
function: full_name
table_argument: random
name: full_name
comment:
table: author
reason: 'in table "author": in computed field "full_name": the computed field
"full_name" cannot be added to table "author" because "random" is not an input
argument of "full_name" function'
type: computed_field
path: $.args
error: >-
in table "author": in computed field "full_name": the computed field "full_name" cannot be
added to table "author" because "random" is not an input argument of "full_name" function
error: 'in table "author": in computed field "full_name": the computed field "full_name"
cannot be added to table "author" because "random" is not an input argument
of "full_name" function'
code: constraint-violation
- description: Try adding computed field with a volatile function
url: /v1/query
status: 400
@ -78,14 +104,27 @@
function: fetch_articles_volatile
table_argument: random
response:
internal:
- definition:
definition:
function: fetch_articles_volatile
table_argument: random
name: get_articles
comment:
table: author
reason: "in table \"author\": in computed field \"get_articles\": the computed\
\ field \"get_articles\" cannot be added to table \"author\" for the following\
\ reasons:\n • the function \"fetch_articles_volatile\" is of type VOLATILE;\
\ cannot be added as a computed field\n • \"random\" is not an input argument\
\ of \"fetch_articles_volatile\" function\n"
type: computed_field
path: $.args
error: >
in table "author": in computed field "get_articles": the computed field "get_articles" cannot
be added to table "author" for the following reasons:
• the function "fetch_articles_volatile" is of type VOLATILE; cannot be added as a computed field
• "random" is not an input argument of "fetch_articles_volatile" function
error: "in table \"author\": in computed field \"get_articles\": the computed\
\ field \"get_articles\" cannot be added to table \"author\" for the following\
\ reasons:\n • the function \"fetch_articles_volatile\" is of type VOLATILE;\
\ cannot be added as a computed field\n • \"random\" is not an input argument\
\ of \"fetch_articles_volatile\" function\n"
code: constraint-violation
- description: Try adding a computed field with a function with no input arguments
url: /v1/query
status: 400
@ -97,12 +136,23 @@
definition:
function: hello_world
response:
internal:
- definition:
definition:
function: hello_world
table_argument:
name: hello_world
comment:
table: author
reason: 'in table "author": in computed field "hello_world": the computed field
"hello_world" cannot be added to table "author" because the function "hello_world"
has no input arguments'
type: computed_field
path: $.args
error: >-
in table "author": in computed field "hello_world": the computed field "hello_world" cannot be
added to table "author" because the function "hello_world" has no input arguments
error: 'in table "author": in computed field "hello_world": the computed field
"hello_world" cannot be added to table "author" because the function "hello_world"
has no input arguments'
code: constraint-violation
- description: Try adding a computed field with first argument as table argument
url: /v1/query
status: 400
@ -114,10 +164,24 @@
definition:
function: fetch_articles
response:
internal:
- definition:
definition:
function: fetch_articles
table_argument:
name: get_articles
comment:
table: author
reason: "in table \"author\": in computed field \"get_articles\": the computed\
\ field \"get_articles\" cannot be added to table \"author\" for the following\
\ reasons:\n • first argument of the function \"fetch_articles\" is not COMPOSITE\
\ type\n • first argument of the function \"fetch_articles\" of type \"pg_catalog.text\"\
\ is not the table to which the computed field is being added\n"
type: computed_field
path: $.args
error: >
in table "author": in computed field "get_articles": the computed field "get_articles" cannot
be added to table "author" for the following reasons:
• first argument of the function "fetch_articles" is not COMPOSITE type
• first argument of the function "fetch_articles" of type "pg_catalog.text" is not the table to which the computed field is being added
error: "in table \"author\": in computed field \"get_articles\": the computed\
\ field \"get_articles\" cannot be added to table \"author\" for the following\
\ reasons:\n • first argument of the function \"fetch_articles\" is not COMPOSITE\
\ type\n • first argument of the function \"fetch_articles\" of type \"pg_catalog.text\"\
\ is not the table to which the computed field is being added\n"
code: constraint-violation

View File

@ -40,12 +40,27 @@
- random
filter: {}
response:
path: "$.args"
error: >-
in table "author": in permission for role "user": computed field "random" does not exist
internal:
- definition:
role: user
comment:
permission:
allow_aggregations: false
computed_fields:
- full_name
- random
columns: '*'
filter: {}
table: author
reason: 'in table "author": in permission for role "user": computed field "random"
does not exist'
type: select_permission
path: $.args
error: 'in table "author": in permission for role "user": computed field "random"
does not exist'
code: constraint-violation
- description: Create select permission with computed field which returns a set of table
- description: Create select permission with computed field which returns a set of
table
url: /v1/query
status: 400
query:
@ -60,13 +75,27 @@
- get_articles
filter: {}
response:
path: "$.args"
error: >-
in table "author": in permission for role "user": select permissions on computed field
"get_articles" are auto-derived from the permissions on its returning table "article" and
cannot be specified manually
internal:
- definition:
role: user
comment:
permission:
allow_aggregations: false
computed_fields:
- full_name
- get_articles
columns: '*'
filter: {}
table: author
reason: 'in table "author": in permission for role "user": select permissions
on computed field "get_articles" are auto-derived from the permissions on
its returning table "article" and cannot be specified manually'
type: select_permission
path: $.args
error: 'in table "author": in permission for role "user": select permissions on
computed field "get_articles" are auto-derived from the permissions on its returning
table "article" and cannot be specified manually'
code: constraint-violation
- description: Create select permission on article table
url: /v1/query
status: 200
@ -106,7 +135,7 @@
table: author
name: full_name
response:
path: "$.args"
path: $.args
error: 'cannot drop due to the following dependent objects : permission author.user.select'
code: dependency-error

View File

@ -2,9 +2,23 @@ description: Create permission with admin as role (error)
url: /v1/query
status: 400
response:
internal:
- definition:
role: admin
comment:
permission:
allow_aggregations: false
computed_fields: []
columns: '*'
filter:
id: X-Hasura-User-Id
table: author
reason: 'in table "author": in permission for role "admin": cannot define permission
for admin role'
type: select_permission
path: $.args
error: >-
in table "author": in permission for role "admin": duplicate definition for select permission
error: 'in table "author": in permission for role "admin": cannot define permission
for admin role'
code: constraint-violation
query:
type: create_select_permission

View File

@ -6,6 +6,18 @@ response:
error: >-
in table "author": in relationship "articles": no foreign constraint exists on the given column
code: constraint-violation
internal:
- type: array_relation
definition:
name: articles
table: author
using:
foreign_key_constraint_on:
column: published_on
table: article
comment:
reason: >-
in table "author": in relationship "articles": no foreign constraint exists on the given column
query:
type: create_array_relationship
args:

View File

@ -6,6 +6,16 @@ response:
in table "article": in relationship "author": no foreign constraint exists on the given column
path: $.args
code: constraint-violation
internal:
- type: object_relation
definition:
name: author
table: article
using:
foreign_key_constraint_on: published_on
comment:
reason: >-
in table "article": in relationship "author": no foreign constraint exists on the given column
query:
type: create_object_relationship
args:

View File

@ -292,8 +292,8 @@ def equal_CommentedMap(m1, m2):
else:
m1_l = sorted(list(m1.items()))
m2_l = sorted(list(m2.items()))
return (len(m1_l) == len(m2_l) and
all(k1 == k2 and equal_CommentedMap(v1,v2)
for (k1,v1),(k2,v2) in zip(m1_l,m2_l)))
# else this is a scalar:
else:
@ -338,7 +338,7 @@ def check_query_f(hge_ctx, f, transport='http', add_auth=True):
"\n NOTE: if this case was marked 'xfail' this won't be correct!"
)
c.seek(0)
c.write(yml.dump(conf))
yml.dump(conf, stream=c)
c.truncate()
@ -390,7 +390,7 @@ def collapse_order_not_selset(result_inp, query):
# Use this since jsondiff seems to produce object/dict structures that can't
# always be serialized to json.
# Copy-pasta from: https://stackoverflow.com/q/12734517/176841
def stringify_keys(d):
"""Convert a dict's keys to strings if they are not."""
if isinstance(d, dict):