Add support for fine-grained dependency tracking to Incremental

Alexis King 2019-12-15 07:28:23 -06:00
parent 89af4ae4d7
commit fa9077f774
34 changed files with 942 additions and 396 deletions

View File

@ -91,6 +91,8 @@ library
, http-client-tls
, profunctors
, deepseq
, dependent-map >=0.2.4 && <0.4
, dependent-sum >=0.4 && <0.5
-- `these >=1` is split into several different packages, but our current stack
-- resolver has `these <1`; when we upgrade we just need to add an extra
@ -232,7 +234,12 @@ library
, Data.Aeson.Ordered
other-modules: Hasura.Server.Auth.JWT
other-modules: Hasura.Incremental.Select
, Hasura.Incremental.Internal.Cache
, Hasura.Incremental.Internal.Dependency
, Hasura.Incremental.Internal.Rule
, Hasura.Server.Auth.JWT
, Hasura.Server.Middleware
, Hasura.Server.Cors
, Hasura.Server.CheckUpdates

View File

@ -13,6 +13,8 @@ module Control.Arrow.Extended
, (>->)
, (<-<)
, dup
, bothA
, orA
, foldlA'
, traverseA_
@ -52,6 +54,17 @@ dup :: (Arrow arr) => arr a (a, a)
dup = arr \x -> (x, x)
{-# INLINE dup #-}
bothA :: (Arrow arr) => arr a b -> arr (a, a) (b, b)
bothA f = f *** f
{-# INLINE bothA #-}
orA :: (ArrowChoice arr) => arr a Bool -> arr b Bool -> arr (a, b) Bool
orA f g = proc (a, b) -> do
c <- f -< a
if c then returnA -< True else g -< b
{-# INLINABLE orA #-}
{-# RULES "orA/arr" forall f g. arr f `orA` arr g = arr (f `orA` g) #-}
-- | 'foldl'' lifted to arrows. See also Note [Weird control operator types].
foldlA' :: (ArrowChoice arr, Foldable t) => arr (e, (b, (a, s))) b -> arr (e, (b, (t a, s))) b
foldlA' f = arr (\(e, (v, (xs, s))) -> (e, (v, (toList xs, s)))) >>> go where

View File

@ -18,10 +18,10 @@ module Control.Arrow.Trans
, WriterA(WriterA, runWriterA)
) where
import Prelude hiding ((.), id)
import Prelude hiding (id, (.))
import Control.Arrow
import Control.Category
import Control.Arrow
import Control.Category
import Control.Monad.Error.Class
import Control.Monad.Reader.Class
import Control.Monad.Writer.Class

View File

@ -6,6 +6,7 @@ import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Data.Has
import Hasura.Incremental (Cacheable)
import Language.Haskell.TH.Syntax (Lift)
import qualified Data.HashMap.Strict as Map
@ -90,6 +91,7 @@ data TableCustomRootFields
, _tcrfDelete :: !(Maybe G.Name)
} deriving (Show, Eq, Lift, Generic)
instance NFData TableCustomRootFields
instance Cacheable TableCustomRootFields
$(deriveToJSON (aesonDrop 5 snakeCase){omitNothingFields=True} ''TableCustomRootFields)
instance FromJSON TableCustomRootFields where

View File

@ -1,5 +1,3 @@
{-# LANGUAGE Arrows #-}
-- | A simple implementation of /incremental build rules/, which can be used to avoid unnecessary
-- recomputation on incrementally-changing input. See 'Rule' for more details.
module Hasura.Incremental
@ -10,331 +8,18 @@ module Hasura.Incremental
, rebuildRule
, result
, ArrowCache(..)
, ArrowDistribute(..)
, ArrowCache(..)
, Dependency
, Selector
, selectD
, selectKeyD
, Cacheable(..)
, Accesses
) where
import Hasura.Prelude hiding (id, (.))
import qualified Data.HashMap.Strict as M
import Control.Applicative
import Control.Arrow.Extended
import Control.Category
import Data.Profunctor
import Data.Tuple (swap)
-- | A value of type @'Rule' m a b@ is a /build rule/: a computation that describes how to build a
-- value of type @b@ from a value of type @a@ in a monad @m@. What distinguishes @'Rule' m a b@ from
-- an ordinary function of type @a -> m b@ is that it can be made /incremental/ (in the sense of
-- “incremental compilation”) — after executing it, future executions can perform a subset of the
-- required work if only a portion of the input changed.
--
-- To achieve this, 'Rule's have a more restrictive interface: there is no @Monad ('Rule' m a)@
-- instance, for example. Instead, 'Rule's are composed using the 'Arrow' hierarchy of operations,
-- which ensures that the dependency graph of build rules is mostly static (though it may contain
-- conditional branches, and combinators such as 'keyed' can express restricted forms of dynamic
-- dependencies). Each atomic rule may be defined using the 'Monad' instance for @m@, but
-- incrementalization is not supported inside those rules — they are treated as a single, monolithic
-- computation.
--
-- Atomic rules are created with the 'arrM' function, and caching can be added to a rule using the
-- 'cache' combinator. Rules can be executed using the 'build' function, which returns a 'Result'. A
-- 'Result' contains the built value, accessible via 'result', but it also allows supplying a new
-- input value using 'rebuild' to produce a new result incrementally.
newtype Rule m a b
-- Note: this is a CPS encoding of `a -> m (Result m a b)`. In practice, the CPS encoding seems to
-- provide meaningful performance improvements: it cuts down significantly on allocation and is
-- friendlier to GHC's optimizer.
= Rule (forall r. a -> (b -> Rule m a b -> m r) -> m r)
build :: (Applicative m) => Rule m a b -> a -> m (Result m a b)
build (Rule r) a = r a \b r' -> pure $ Result b r'
{-# INLINE build #-}
data Result m a b
= Result
{ result :: !b
, rebuildRule :: !(Rule m a b)
} deriving (Functor)
rebuild :: (Applicative m) => Result m a b -> a -> m (Result m a b)
rebuild = build . rebuildRule
{-# INLINE rebuild #-}
{- Note [Rule rewrite rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As explained by Note [Arrow rewrite rules] in Control.Arrow.Extended, it's important to define
type-specific rewrite rules to get good performance with arrows when the concrete type is used. This
is especially important for `Rule`, since the recursive definitions of operations like `.` and `arr`
are very difficult for the optimizer to deal with, and the composition of lots of small rules
created with `arr` is very inefficient.
Since GHC aggressively specializes and inlines class methods, the rules cannot be defined on the
class methods themselves. Instead, the class methods expand to auxiliary definitions, and those
definitions include an INLINABLE[0] pragma that ensures they do not inline until the final
optimization phase. The rules are defined in terms of those definitions, so they will be able to do
their work in prior phases.
Note [Desugaring derived operations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
One subtlety to the above is that we want to define operations in terms of other operations as much
as possible to avoid the need to write an enormous number of rewrite rules, but if we define them
that way directly, then we'll end up using needlessly inefficient implementations when the
operations aren't specialized. Therefore, we provide efficient implementations of operations like
`second`, but aggressively rewrite them in terms of simpler primitives like `first` when GHC is able
to specialize them. -}
rComp :: Rule m a1 b -> Rule m a2 a1 -> Rule m a2 b
Rule f `rComp` Rule g = Rule \a k -> g a \b g' -> f b \c f' -> k c (f' `rComp` g')
{-# INLINABLE[0] rComp #-}
{-# RULES "associate" forall f g h. f `rComp` (g `rComp` h) = (f `rComp` g) `rComp` h #-}
rId :: Rule m a a
rId = Rule \a k -> k a rId
{-# INLINABLE[0] rId #-}
{-# RULES
"f/id" forall f. f `rComp` rId = f
"id/f" forall f. rId `rComp` f = f
#-}
rArr :: (a -> b) -> Rule m a b
rArr f = Rule \a k -> k (f a) (rArr f)
{-# INLINABLE[0] rArr #-}
{-# RULES
"arr/id" rArr (\x -> x) = rId
"arr/const" [1] forall x. rArr (\_ -> x) = rPure x
"arr/arr" forall f g. rArr f `rComp` rArr g = rArr (f . g)
"arr/arr/f" forall f g h. (f `rComp` rArr g) `rComp` rArr h = f `rComp` rArr (g . h)
#-}
rArrM :: (Monad m) => (a -> m b) -> Rule m a b
rArrM f = Rule \a k -> f a >>= \b -> k b (rArrM f)
{-# INLINABLE[0] rArrM #-}
{-# RULES
"arrM/arrM" forall f g. rArrM f `rComp` rArrM g = rArrM (f <=< g)
"arr/arrM" forall f g. rArr f `rComp` rArrM g = rArrM (fmap f . g)
"arrM/arr" forall f g. rArrM f `rComp` rArr g = rArrM (f . g)
"arrM/arrM/f" forall f g h. (f `rComp` rArrM g) `rComp` rArrM h = f `rComp` rArrM (g <=< h)
"arr/arrM/f" forall f g h. (f `rComp` rArr g) `rComp` rArrM h = f `rComp` rArrM (fmap g . h)
"arrM/arr/f" forall f g h. (f `rComp` rArrM g) `rComp` rArr h = f `rComp` rArrM (g . h)
#-}
rFirst :: Rule m a b1 -> Rule m (a, b2) (b1, b2)
rFirst (Rule r) = Rule \(a, c) k -> r a \b r' -> k (b, c) (rFirst r')
{-# INLINABLE[0] rFirst #-}
{-# RULES
"first/id" rFirst rId = rId
"first/arr" forall f. rFirst (rArr f) = rArr (second f)
"first/arrM" forall f. rFirst (rArrM f) = rArrM (runKleisli (first (Kleisli f)))
"first/push" [~1] forall f g. rFirst (f `rComp` g) = rFirst f `rComp` rFirst g
"first/pull" [1] forall f g. rFirst f `rComp` rFirst g = rFirst (f `rComp` g)
"first/f/pull" [1] forall f g h. (f `rComp` rFirst g) `rComp` rFirst h = f `rComp` rFirst (g `rComp` h)
#-}
rLeft :: Rule m a b1 -> Rule m (Either a b2) (Either b1 b2)
rLeft r0 = go r0 where
go (Rule r) = Rule \e k -> case e of
Left a -> r a \b r' -> k (Left b) (go r')
Right c -> k (Right c) (go r0)
{-# INLINABLE[0] rLeft #-}
{-# RULES
"left/id" rLeft rId = rId
"left/arr" forall f. rLeft (rArr f) = rArr (left f)
"left/arrM" forall f. rLeft (rArrM f) = rArrM (runKleisli (left (Kleisli f)))
"left/push" [~1] forall f g. rLeft (f `rComp` g) = rLeft f `rComp` rLeft g
"left/pull" [1] forall f g. rLeft f `rComp` rLeft g = rLeft (f `rComp` g)
"left/f/pull" [1] forall f g h. (f `rComp` rLeft g) `rComp` rLeft h = f `rComp` rLeft (g `rComp` h)
#-}
rPure :: b -> Rule m a b
rPure a = Rule \_ k -> k a (rPure a)
{-# INLINABLE[0] rPure #-}
{-# RULES "pure/push" [~1] rPure = rArr . const #-} -- see Note [Desugaring derived operations]
rSecond :: Rule m a1 b -> Rule m (a2, a1) (a2, b)
rSecond (Rule r) = Rule \(c, a) k -> r a \b r' -> k (c, b) (rSecond r')
{-# INLINABLE[0] rSecond #-}
-- see Note [Desugaring derived operations]
{-# RULES "second/push" [~1] forall f. rSecond f = rArr swap . rFirst f . rArr swap #-}
swapEither :: Either a b -> Either b a
swapEither = either Right Left
{-# INLINE[0] swapEither #-}
rRight :: Rule m a1 b -> Rule m (Either a2 a1) (Either a2 b)
rRight r0 = go r0 where
go (Rule r) = Rule \e k -> case e of
Left c -> k (Left c) (go r0)
Right a -> r a \b r' -> k (Right b) (go r')
{-# INLINABLE[0] rRight #-}
-- see Note [Desugaring derived operations]
{-# RULES "right/push" [~1] forall f. rRight f = rArr swapEither . rLeft f . rArr swapEither #-}
rSplit :: Rule m a1 b1 -> Rule m a2 b2 -> Rule m (a1, a2) (b1, b2)
Rule f `rSplit` Rule g = Rule \(a, b) k -> f a \c f' -> g b \d g' -> k (c, d) (f' `rSplit` g')
{-# INLINABLE[0] rSplit #-}
-- see Note [Desugaring derived operations]
{-# RULES "***/push" [~1] forall f g. f `rSplit` g = rSecond g . rFirst f #-}
rFanout :: Rule m a b1 -> Rule m a b2 -> Rule m a (b1, b2)
Rule f `rFanout` Rule g = Rule \a k -> f a \b f' -> g a \c g' -> k (b, c) (f' `rFanout` g')
{-# INLINABLE[0] rFanout #-}
-- see Note [Desugaring derived operations]
{-# RULES "&&&/push" [~1] forall f g. f `rFanout` g = (f *** g) . rArr (\a -> (a, a)) #-}
rFork :: Rule m a1 b1 -> Rule m a2 b2 -> Rule m (Either a1 a2) (Either b1 b2)
f0 `rFork` g0 = go f0 g0 where
go (Rule f) (Rule g) = Rule \e k -> case e of
Left a -> f a \b f' -> k (Left b) (go f' g0)
Right a -> g a \b g' -> k (Right b) (go f0 g')
{-# INLINABLE[0] rFork #-}
-- see Note [Desugaring derived operations]
{-# RULES "+++/push" [~1] forall f g. f `rFork` g = rRight g . rLeft f #-}
fromEither :: Either a a -> a
fromEither = either id id
{-# INLINE[0] fromEither #-}
rFanin :: Rule m a1 b -> Rule m a2 b -> Rule m (Either a1 a2) b
f0 `rFanin` g0 = go f0 g0 where
go (Rule f) (Rule g) = Rule \e k -> case e of
Left a -> f a \b f' -> k b (go f' g0)
Right a -> g a \b g' -> k b (go f0 g')
{-# INLINABLE[0] rFanin #-}
-- see Note [Desugaring derived operations]
{-# RULES "|||/push" [~1] forall f g. f `rFanin` g = rArr fromEither . (f +++ g) #-}
instance Functor (Rule m a) where
fmap f r = arr f . r
{-# INLINE fmap #-}
instance Applicative (Rule m a) where
pure = rPure
{-# INLINE pure #-}
(<*>) = liftA2 ($)
{-# INLINE (<*>) #-}
liftA2 f g h = arr (uncurry f) . (g &&& h)
{-# INLINE liftA2 #-}
instance Profunctor (Rule m) where
dimap f g r = arr g . r . arr f
{-# INLINE dimap #-}
lmap f r = r . arr f
{-# INLINE lmap #-}
rmap = fmap
{-# INLINE rmap #-}
instance Strong (Rule m) where
first' = rFirst
{-# INLINE first' #-}
second' = rSecond
{-# INLINE second' #-}
instance Choice (Rule m) where
left' = rLeft
{-# INLINE left' #-}
right' = rRight
{-# INLINE right' #-}
instance Category (Rule m) where
id = rId
{-# INLINE id #-}
(.) = rComp
{-# INLINE (.) #-}
instance Arrow (Rule m) where
arr = rArr
{-# INLINE arr #-}
first = rFirst
{-# INLINE first #-}
second = rSecond
{-# INLINE second #-}
(***) = rSplit
{-# INLINE (***) #-}
(&&&) = rFanout
{-# INLINE (&&&) #-}
instance ArrowChoice (Rule m) where
left = rLeft
{-# INLINE left #-}
right = rRight
{-# INLINE right #-}
(+++) = rFork
{-# INLINE (+++) #-}
(|||) = rFanin
{-# INLINE (|||) #-}
instance (Monad m) => ArrowKleisli m (Rule m) where
arrM = rArrM
{-# INLINE arrM #-}
class (Arrow arr) => ArrowCache arr where
-- | Adds equality-based caching to the given arrow. After each execution of the arrow, its input
-- and result values are cached. On the next execution, the new input value is compared via '=='
-- to the previous input value. If they are the same, the previous result is returned /without/
-- re-executing the arrow. Otherwise, the old cached values are discarded, and the arrow is
-- re-executed to produce a new set of cached values.
--
-- Indiscriminate use of 'cache' is likely to have little effect except to increase memory usage,
-- since the input and result of each execution must be retained in memory. Avoid using 'cache'
-- around arrows with large input or output that is likely to change often unless profiling
-- indicates it is computationally expensive enough to be worth the memory overhead.
--
-- __Note that only direct inputs and outputs of the given arrow are cached.__ If an arrow
-- provides access to values through a side-channel, they will __not__ participate in caching.
cache :: (Eq a) => arr a b -> arr a b
instance (ArrowChoice arr, ArrowCache arr) => ArrowCache (ErrorA e arr) where
cache (ErrorA f) = ErrorA (cache f)
{-# INLINE cache #-}
instance (Monoid w, ArrowCache arr) => ArrowCache (WriterA w arr) where
cache (WriterA f) = WriterA (cache f)
{-# INLINE cache #-}
instance ArrowCache (Rule m) where
cache (Rule r0) = Rule \a k -> r0 a \b r0' -> k b (cached a b r0')
where
cached a b (Rule r) = Rule \a' k -> if
| a == a' -> k b (cached a b (Rule r))
| otherwise -> r a' \b' r' -> k b' (cached a' b' r')
{-# INLINABLE cache #-}
class (Arrow arr) => ArrowDistribute arr where
-- | Distributes an arrow that operates on key-value pairs, over a 'M.HashMap' in an
-- order-independent way.
--
-- This is intended to be used as a control operator in @proc@ notation; see
-- Note [Weird control operator types] in "Control.Arrow.Extended".
keyed
:: (Eq k, Hashable k)
=> arr (e, (k, (a, s))) b
-> arr (e, (HashMap k a, s)) (HashMap k b)
instance (Monoid w, ArrowDistribute arr) => ArrowDistribute (WriterA w arr) where
keyed (WriterA f) = WriterA (arr (swap . sequence . fmap swap) . keyed f)
{-# INLINE keyed #-}
-- | Unlike 'traverseA', using 'keyed' preserves incrementalization: if the input rule is
-- incremental in its argument, the resulting rule will be incremental as well for any entries in
-- the map that do not change between builds.
instance ArrowDistribute (Rule m) where
keyed
:: forall a b k e s
. (Eq k, Hashable k)
=> Rule m (e, (k, (a, s))) b
-> Rule m (e, (HashMap k a, s)) (HashMap k b)
keyed r0 = keyedWith M.empty
where
keyedWith
:: HashMap k (Rule m (e, (k, (a, s))) b)
-> Rule m (e, (HashMap k a, s)) (HashMap k b)
keyedWith !rs = Rule \(e, (vs, s)) c ->
M.foldrWithKey (process rs e s) (finish c) vs M.empty M.empty
process
:: HashMap k (Rule m (e, (k, (a, s))) b)
-> e -> s -> k -> a
-> (HashMap k b -> HashMap k (Rule m (e, (k, (a, s))) b) -> m r)
-> HashMap k b -> HashMap k (Rule m (e, (k, (a, s))) b) -> m r
process rs e s k a c !vs' !rs' =
let Rule r = M.lookupDefault r0 k rs
in r (e, (k, (a, s))) \b r' -> c (M.insert k b vs') (M.insert k r' rs')
finish
:: (HashMap k b -> Rule m (e, (HashMap k a, s)) (HashMap k b) -> m r)
-> HashMap k b -> HashMap k (Rule m (e, (k, (a, s))) b) -> m r
finish c !vs' !rs' = c vs' (keyedWith rs')
{-# INLINABLE keyed #-}
import Hasura.Incremental.Internal.Cache
import Hasura.Incremental.Internal.Dependency
import Hasura.Incremental.Internal.Rule
import Hasura.Incremental.Select

View File

@ -0,0 +1,70 @@
{-# OPTIONS_HADDOCK not-home #-}
module Hasura.Incremental.Internal.Cache where
import Hasura.Prelude
import Control.Arrow.Extended
import Control.Monad.Unique
import Hasura.Incremental.Internal.Dependency
import Hasura.Incremental.Internal.Rule
import Hasura.Incremental.Select
class (Arrow arr) => ArrowCache arr where
-- | Adds equality-based caching to the given arrow. After each execution of the arrow, its input
-- and result values are cached. On the next execution, the new input value is compared via '=='
-- to the previous input value. If they are the same, the previous result is returned /without/
-- re-executing the arrow. Otherwise, the old cached values are discarded, and the arrow is
-- re-executed to produce a new set of cached values.
--
-- Indiscriminate use of 'cache' is likely to have little effect except to increase memory usage,
-- since the input and result of each execution must be retained in memory. Avoid using 'cache'
-- around arrows with large input or output that is likely to change often unless profiling
-- indicates it is computationally expensive enough to be worth the memory overhead.
--
-- __Note that only direct inputs and outputs of the given arrow are cached.__ If an arrow
-- provides access to values through a side-channel, they will __not__ participate in caching.
cache :: (Cacheable a) => arr a b -> arr a b
-- | Creates a new 'Dependency', which allows fine-grained caching of composite values; see the
-- documentation for 'Dependency' for more details.
newDependency :: arr a (Dependency a)
-- | Extract the value from a 'Dependency', incurring a dependency on its entirety. To depend on
-- only a portion of the value, use 'selectD' or 'selectKeyD' before passing it to 'dependOn'.
dependOn :: (Cacheable a) => arr (Dependency a) a
instance (ArrowChoice arr, ArrowCache arr) => ArrowCache (ErrorA e arr) where
cache (ErrorA f) = ErrorA (cache f)
{-# INLINE cache #-}
newDependency = liftA newDependency
{-# INLINE newDependency #-}
dependOn = liftA dependOn
{-# INLINE dependOn #-}
instance (Monoid w, ArrowCache arr) => ArrowCache (WriterA w arr) where
cache (WriterA f) = WriterA (cache f)
{-# INLINE cache #-}
newDependency = liftA newDependency
{-# INLINE newDependency #-}
dependOn = liftA dependOn
{-# INLINE dependOn #-}
instance (MonadUnique m) => ArrowCache (Rule m) where
cache r0 = Rule \s a k -> do
let Rule r = listenAccesses r0
r s a \s' (b, accesses) r' -> k s' b (cached accesses a b r')
where
listenAccesses :: Rule m a b -> Rule m a (b, Accesses)
listenAccesses (Rule r) = Rule \s a k -> r mempty a \s' b r' ->
(k $! (s <> s')) (b, s') (listenAccesses r')
cached accesses a b (Rule r) = Rule \s a' k -> if
| unchanged accesses a a' -> k s b (cached accesses a b (Rule r))
| otherwise -> r s a' \s' (b', accesses') r' -> k s' b' (cached accesses' a' b' r')
newDependency = arrM \a -> newUniqueS <&> \u -> Dependency (DependencyRoot u) a
{-# INLINABLE newDependency #-}
dependOn = Rule \s (Dependency key v) k -> (k $! recordAccess key AccessedAll s) v dependOn
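A minimal sketch of how these operations might fit together, assuming a hypothetical per-key step `expensiveStep` and the re-exports from Hasura.Incremental: the rule depends only on the "users" key of its input map, so edits to other keys leave the cache intact.

{-# LANGUAGE Arrows #-}

import qualified Data.HashMap.Strict as M

import Control.Arrow ((>>>))
import Control.Monad.Unique (MonadUnique)
import Hasura.Incremental (Rule, cache, dependOn, newDependency, selectKeyD)

buildUsersCount
  :: (MonadUnique m)
  => Rule m (Maybe Int) Int -- ^ hypothetical expensive step
  -> Rule m (M.HashMap String Int) Int
buildUsersCount expensiveStep = proc config -> do
  dep <- newDependency -< config
  -- Only the selected key is recorded as an access, so the cached sub-rule is
  -- re-executed only when that one entry changes.
  cache (dependOn >>> expensiveStep) -< selectKeyD "users" dep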

View File

@ -0,0 +1,242 @@
{-# OPTIONS_HADDOCK not-home #-}
{-# LANGUAGE GADTs #-}
-- | Supporting functionality for fine-grained dependency tracking.
module Hasura.Incremental.Internal.Dependency where
import Hasura.Prelude
import qualified Data.Dependent.Map as DM
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.URI.Extended as N
import Control.Applicative
import Data.Aeson (Value)
import Data.Functor.Classes (Eq1 (..), Eq2 (..))
import Data.GADT.Compare
import Data.Int
import Data.Scientific (Scientific)
import Data.Vector (Vector)
import GHC.Generics ((:*:) (..), (:+:) (..), Generic (..), K1 (..),
M1 (..), U1 (..), V1)
import Hasura.Incremental.Select
-- | A 'Dependency' represents a value that a 'Rule' can /conditionally/ depend on. A 'Dependency'
-- is created using 'newDependency', and it can be “opened” again using 'dependOn'. What makes a
-- 'Dependency' useful is the way it cooperates with 'cache'---if a 'Dependency' is passed to a
-- cached rule, but that rule (or any of its sub-rules) never “opens” it using 'dependOn', then
-- subsequent executions of the rule will ignore the 'Dependency' when computing whether or not it
-- is necessary to re-execute the rule.
--
-- The above functionality is useful on its own to express conditional dependencies, but even more
-- useful is the ability to express /partial/ dependencies. For example, if a 'Dependency' contains
-- a 'HashMap', a rule can choose to only depend on the value associated with a particular key by
-- using 'selectKeyD' (or the more general 'selectD'). Only the parts that are actually used will be
-- counted when computing whether a rule needs to be re-executed.
data Dependency a = Dependency !(DependencyKey a) !a
instance (Eq a) => Eq (Dependency a) where
Dependency _ a == Dependency _ b = a == b
-- | Applies a 'Selector' to select part of a 'Dependency'.
selectD :: (Select a) => Selector a b -> Dependency a -> Dependency b
selectD k (Dependency dk a) = Dependency (DependencyChild k dk) (select k a)
-- | Selects a single key from a dependency containing a map-like data structure.
selectKeyD :: (Select a, Selector a ~ ConstS k v) => k -> Dependency a -> Dependency v
selectKeyD = selectD . ConstS
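A small sketch of narrowing a dependency, assuming a hypothetical map-valued dependency; `selectKeyD k` is just shorthand for `selectD (ConstS k)`:

import Data.HashMap.Strict (HashMap)
import Hasura.Incremental (Dependency, selectD)
import Hasura.Incremental.Select (ConstS (..))

-- Track only the (illustrative) "users" entry of the map; equivalent to
-- selectKeyD "users".
usersEntryD :: Dependency (HashMap String Int) -> Dependency (Maybe Int)
usersEntryD = selectD (ConstS "users")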
-- | Tracks whether a 'Dependency' is a “root” dependency created by 'newDependency' or a “child”
-- dependency created from an existing dependency using 'selectD'.
data DependencyKey a where
DependencyRoot :: !(UniqueS a) -> DependencyKey a
DependencyChild :: (Select a) => !(Selector a b) -> !(DependencyKey a) -> DependencyKey b
instance GEq DependencyKey where
DependencyRoot a `geq` DependencyRoot b
| Just Refl <- a `geq` b
= Just Refl
DependencyChild a1 a2 `geq` DependencyChild b1 b2
| Just Refl <- a2 `geq` b2
, Just Refl <- a1 `geq` b1
= Just Refl
_ `geq` _ = Nothing
instance GCompare DependencyKey where
DependencyRoot a `gcompare` DependencyRoot b = case gcompare a b of
GLT -> GLT
GEQ -> GEQ
GGT -> GGT
DependencyChild a1 a2 `gcompare` DependencyChild b1 b2 = case gcompare a2 b2 of
GLT -> GLT
GEQ -> case gcompare a1 b1 of
GLT -> GLT
GEQ -> GEQ
GGT -> GGT
GGT -> GGT
DependencyRoot _ `gcompare` DependencyChild _ _ = GLT
DependencyChild _ _ `gcompare` DependencyRoot _ = GGT
-- | A typeclass that implements the dependency-checking machinery used by 'cache'. Morally, this
-- class is like 'Eq', but it only checks the parts of a 'Dependency' that were actually accessed on
-- the previous execution. It is highly unlikely you will need to implement any 'Cacheable'
-- instances yourself; the default implementation uses 'Generic' to derive an instance
-- automatically.
class (Eq a) => Cacheable a where
unchanged :: Accesses -> a -> a -> Bool
default unchanged :: (Generic a, GCacheable (Rep a)) => Accesses -> a -> a -> Bool
unchanged accesses a b = gunchanged (from a) (from b) accesses
{-# INLINABLE unchanged #-}
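A minimal sketch of the intended usage, with a hypothetical record type: an empty instance body picks up the Generic-based default, which is exactly how the instances added elsewhere in this commit are written.

{-# LANGUAGE DeriveGeneric #-}

import GHC.Generics (Generic)
import Hasura.Incremental (Cacheable)

-- A hypothetical record; `unchanged` is derived field by field via GCacheable.
data TableSettings
  = TableSettings
  { tsName    :: !String
  , tsComment :: !(Maybe String)
  } deriving (Show, Eq, Generic)

instance Cacheable TableSettings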
-- | A mapping from root 'Dependency' keys to the accesses made against those dependencies.
newtype Accesses = Accesses { unAccesses :: DM.DMap UniqueS Access }
instance Semigroup Accesses where
Accesses a <> Accesses b = Accesses $ DM.unionWithKey (const (<>)) a b
instance Monoid Accesses where
mempty = Accesses DM.empty
recordAccess :: DependencyKey a -> Access a -> Accesses -> Accesses
recordAccess depKey !access (Accesses accesses) = case depKey of
DependencyRoot rootKey -> Accesses $ DM.insertWith' (<>) rootKey access accesses
DependencyChild selector parentKey ->
recordAccess parentKey (AccessedParts $ DM.singleton selector access) (Accesses accesses)
-- | Records the accesses made within a single 'Dependency' and its children. The 'Semigroup'
-- instance for 'Access' computes a least upper bound:
--
-- * 'AccessedAll' serves as the top of the lattice and records that the dependency's entire value was
-- accessed.
-- * 'AccessedParts' records a set of accesses for individual parts of a dependency.
data Access a where
AccessedAll :: (Cacheable a) => Access a
AccessedParts :: (Select a) => !(DM.DMap (Selector a) Access) -> Access a
instance Semigroup (Access a) where
AccessedAll <> _ = AccessedAll
_ <> AccessedAll = AccessedAll
AccessedParts a <> AccessedParts b = AccessedParts $ DM.unionWithKey (const (<>)) a b
instance (Cacheable a) => Cacheable (Dependency a) where
unchanged accesses (Dependency key1 v1) (Dependency _ v2) =
-- look up which parts of this dependency were previously accessed
case lookupAccess key1 of
-- looking up the access was enough to determine the result
Left result -> result
-- otherwise, look through the accessed children
Right access -> unchangedBy v1 v2 access
where
-- Looks up the Access associated with the given DependencyKey, if it exists.
lookupAccess :: DependencyKey b -> Either Bool (Access b)
lookupAccess = \case
DependencyRoot key -> handleNoAccess $ DM.lookup key (unAccesses accesses)
DependencyChild selector key -> lookupAccess key >>= \case
AccessedAll -> Left (unchanged accesses v1 v2)
AccessedParts parts -> handleNoAccess $ DM.lookup selector parts
where
-- if this dependency was never accessed, then it's certainly unchanged
handleNoAccess = maybe (Left True) Right
-- Walks the given values guided by the given Access, checking that all the subparts
-- identified by the AccessedAll leaves are unchanged.
unchangedBy :: forall b. b -> b -> Access b -> Bool
unchangedBy a b = \case
AccessedAll -> unchanged accesses a b
AccessedParts parts -> DM.foldrWithKey reduce True parts
where
reduce :: (Select b) => Selector b c -> Access c -> Bool -> Bool
reduce selector = (&&) . unchangedBy (select selector a) (select selector b)
-- -------------------------------------------------------------------------------------------------
-- boilerplate Cacheable instances
instance Cacheable Char where unchanged _ = (==)
instance Cacheable Double where unchanged _ = (==)
instance Cacheable Int where unchanged _ = (==)
instance Cacheable Int32 where unchanged _ = (==)
instance Cacheable Integer where unchanged _ = (==)
instance Cacheable Scientific where unchanged _ = (==)
instance Cacheable Text where unchanged _ = (==)
instance Cacheable N.URIAuth where unchanged _ = (==)
instance (Cacheable a) => Cacheable (Seq a) where
unchanged = liftEq . unchanged
instance (Cacheable a) => Cacheable (Vector a) where
unchanged = liftEq . unchanged
instance (Cacheable k, Cacheable v) => Cacheable (HashMap k v) where
unchanged accesses = liftEq2 (unchanged accesses) (unchanged accesses)
instance (Cacheable a) => Cacheable (HashSet a) where
unchanged = liftEq . unchanged
instance Cacheable ()
instance (Cacheable a, Cacheable b) => Cacheable (a, b)
instance (Cacheable a, Cacheable b, Cacheable c) => Cacheable (a, b, c)
instance (Cacheable a, Cacheable b, Cacheable c, Cacheable d) => Cacheable (a, b, c, d)
instance (Cacheable a, Cacheable b, Cacheable c, Cacheable d, Cacheable e) => Cacheable (a, b, c, d, e)
instance Cacheable Bool
instance Cacheable Value
instance Cacheable G.Argument
instance Cacheable G.Directive
instance Cacheable G.ExecutableDefinition
instance Cacheable G.Field
instance Cacheable G.FragmentDefinition
instance Cacheable G.FragmentSpread
instance Cacheable G.GType
instance Cacheable G.InlineFragment
instance Cacheable G.Nullability
instance Cacheable G.OperationDefinition
instance Cacheable G.OperationType
instance Cacheable G.Selection
instance Cacheable G.TypedOperationDefinition
instance Cacheable G.Value
instance Cacheable G.ValueConst
instance Cacheable G.VariableDefinition
instance Cacheable N.URI
instance (Cacheable a) => Cacheable (Maybe a)
instance (Cacheable a, Cacheable b) => Cacheable (Either a b)
instance (Cacheable a) => Cacheable [a]
instance (Cacheable a) => Cacheable (NonEmpty a)
instance (Cacheable a) => Cacheable (G.ObjectFieldG a)
deriving instance Cacheable G.Alias
deriving instance Cacheable G.EnumValue
deriving instance Cacheable G.ExecutableDocument
deriving instance Cacheable G.ListType
deriving instance Cacheable G.Name
deriving instance Cacheable G.NamedType
deriving instance Cacheable G.StringValue
deriving instance Cacheable G.Variable
deriving instance (Cacheable a) => Cacheable (G.ListValueG a)
deriving instance (Cacheable a) => Cacheable (G.ObjectValueG a)
class GCacheable f where
gunchanged :: f p -> f p -> Accesses -> Bool
instance GCacheable V1 where
gunchanged a = case a of {}
{-# INLINE gunchanged #-}
instance GCacheable U1 where
gunchanged U1 U1 _ = True
{-# INLINE gunchanged #-}
instance (Cacheable a) => GCacheable (K1 t a) where
gunchanged (K1 a) (K1 b) accesses = unchanged accesses a b
{-# INLINE gunchanged #-}
instance (GCacheable f) => GCacheable (M1 t m f) where
gunchanged (M1 a) (M1 b) = gunchanged a b
{-# INLINE gunchanged #-}
instance (GCacheable f, GCacheable g) => GCacheable (f :*: g) where
gunchanged (a1 :*: a2) (b1 :*: b2) = liftA2 (&&) (gunchanged a1 b1) (gunchanged a2 b2)
{-# INLINE gunchanged #-}
instance (GCacheable f, GCacheable g) => GCacheable (f :+: g) where
gunchanged (L1 a) (L1 b) = gunchanged a b
gunchanged (R1 a) (R1 b) = gunchanged a b
gunchanged _ _ = const False
{-# INLINE gunchanged #-}

View File

@ -0,0 +1,304 @@
{-# OPTIONS_HADDOCK not-home #-}
{-# LANGUAGE Arrows #-}
{-# LANGUAGE UndecidableInstances #-}
-- | Defines the basic 'Rule' datatype and its core operations.
module Hasura.Incremental.Internal.Rule where
import Hasura.Prelude hiding (id, (.))
import qualified Data.HashMap.Strict as HM
import Control.Applicative hiding (liftA)
import Control.Arrow.Extended
import Control.Category
import Data.Profunctor
import Data.Tuple (swap)
import Hasura.Incremental.Internal.Dependency
-- | A value of type @'Rule' m a b@ is a /build rule/: a computation that describes how to build a
-- value of type @b@ from a value of type @a@ in a monad @m@. What distinguishes @'Rule' m a b@ from
-- an ordinary function of type @a -> m b@ is that it can be made /incremental/ (in the sense of
-- “incremental compilation”)—after executing it, future executions can perform a subset of the
-- required work if only a portion of the input changed.
--
-- To achieve this, 'Rule's have a more restrictive interface: there is no @Monad ('Rule' m a)@
-- instance, for example. Instead, 'Rule's are composed using the 'Arrow' hierarchy of operations,
-- which ensures that the dependency graph of build rules is mostly static (though it may contain
-- conditional branches, and combinators such as 'keyed' can express restricted forms of dynamic
-- dependencies). Each atomic rule may be defined using the 'Monad' instance for @m@, but
-- incrementalization is not supported inside those rules — they are treated as a single, monolithic
-- computation.
--
-- Atomic rules are created with the 'arrM' function, and caching can be added to a rule using the
-- 'cache' combinator. Rules can be executed using the 'build' function, which returns a 'Result'. A
-- 'Result' contains the built value, accessible via 'result', but it also allows supplying a new
-- input value using 'rebuild' to produce a new result incrementally.
newtype Rule m a b
-- Note: this is a CPS encoding of `Accesses -> a -> m (Result m a b)`. In practice, the CPS
-- encoding seems to provide meaningful performance improvements: it cuts down significantly on
-- allocation and is friendlier to GHC's optimizer.
= Rule (forall r. Accesses -> a -> (Accesses -> b -> Rule m a b -> m r) -> m r)
build :: (Applicative m) => Rule m a b -> a -> m (Result m a b)
build (Rule r) a = r mempty a \_ b r' -> pure $ Result b r'
{-# INLINE build #-}
data Result m a b
= Result
{ result :: !b
, rebuildRule :: !(Rule m a b)
} deriving (Functor)
rebuild :: (Applicative m) => Result m a b -> a -> m (Result m a b)
rebuild = build . rebuildRule
{-# INLINE rebuild #-}
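A minimal sketch of driving a rule by hand, assuming `build`, `rebuild`, and `result` are re-exported from Hasura.Incremental as before; the atomic step here is just `length` lifted with `arrM`:

import Control.Arrow.Extended (arrM)
import Hasura.Incremental (Rule, build, rebuild, result)

countChars :: Rule IO String Int
countChars = arrM (pure . length)

runTwice :: IO Int
runTwice = do
  r0 <- build countChars "hello"      -- first build from scratch
  r1 <- rebuild r0 "hello, world"     -- incremental rebuild with new input
  pure (result r1)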
{- Note [Rule rewrite rules]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
As explained by Note [Arrow rewrite rules] in Control.Arrow.Extended, it's important to define
type-specific rewrite rules to get good performance with arrows when the concrete type is used. This
is especially important for `Rule`, since the recursive definitions of operations like `.` and `arr`
are very difficult for the optimizer to deal with, and the composition of lots of small rules
created with `arr` is very inefficient.
Since GHC aggressively specializes and inlines class methods, the rules cannot be defined on the
class methods themselves. Instead, the class methods expand to auxiliary definitions, and those
definitions include an INLINABLE[0] pragma that ensures they do not inline until the final
optimization phase. The rules are defined in terms of those definitions, so they will be able to do
their work in prior phases.
Note [Desugaring derived operations]
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
One subtlety to the above is that we want to define operations in terms of other operations as much
as possible to avoid the need to write an enormous number of rewrite rules, but if we define them
that way directly, then we'll end up using needlessly inefficient implementations when the
operations aren't specialized. Therefore, we provide efficient implementations of operations like
`second`, but aggressively rewrite them in terms of simpler primitives like `first` when GHC is able
to specialize them. -}
rComp :: Rule m a1 b -> Rule m a2 a1 -> Rule m a2 b
Rule f `rComp` Rule g = Rule \s a k -> g s a \s' b g' -> f s' b \s'' c f' -> k s'' c (f' `rComp` g')
{-# INLINABLE[0] rComp #-}
{-# RULES "associate" forall f g h. f `rComp` (g `rComp` h) = (f `rComp` g) `rComp` h #-}
rId :: Rule m a a
rId = Rule \s a k -> k s a rId
{-# INLINABLE[0] rId #-}
{-# RULES
"f/id" forall f. f `rComp` rId = f
"id/f" forall f. rId `rComp` f = f
#-}
rArr :: (a -> b) -> Rule m a b
rArr f = Rule \s a k -> k s (f a) (rArr f)
{-# INLINABLE[0] rArr #-}
{-# RULES
"arr/id" rArr (\x -> x) = rId
"arr/const" [1] forall x. rArr (\_ -> x) = rPure x
"arr/arr" forall f g. rArr f `rComp` rArr g = rArr (f . g)
"arr/arr/f" forall f g h. (f `rComp` rArr g) `rComp` rArr h = f `rComp` rArr (g . h)
#-}
rArrM :: (Monad m) => (a -> m b) -> Rule m a b
rArrM f = Rule \s a k -> f a >>= \b -> k s b (rArrM f)
{-# INLINABLE[0] rArrM #-}
{-# RULES
"arrM/arrM" forall f g. rArrM f `rComp` rArrM g = rArrM (f <=< g)
"arr/arrM" forall f g. rArr f `rComp` rArrM g = rArrM (fmap f . g)
"arrM/arr" forall f g. rArrM f `rComp` rArr g = rArrM (f . g)
"arrM/arrM/f" forall f g h. (f `rComp` rArrM g) `rComp` rArrM h = f `rComp` rArrM (g <=< h)
"arr/arrM/f" forall f g h. (f `rComp` rArr g) `rComp` rArrM h = f `rComp` rArrM (fmap g . h)
"arrM/arr/f" forall f g h. (f `rComp` rArrM g) `rComp` rArr h = f `rComp` rArrM (g . h)
#-}
rFirst :: Rule m a b1 -> Rule m (a, b2) (b1, b2)
rFirst (Rule r) = Rule \s (a, c) k -> r s a \s' b r' -> k s' (b, c) (rFirst r')
{-# INLINABLE[0] rFirst #-}
{-# RULES
"first/id" rFirst rId = rId
"first/arr" forall f. rFirst (rArr f) = rArr (second f)
"first/arrM" forall f. rFirst (rArrM f) = rArrM (runKleisli (first (Kleisli f)))
"first/push" [~1] forall f g. rFirst (f `rComp` g) = rFirst f `rComp` rFirst g
"first/pull" [1] forall f g. rFirst f `rComp` rFirst g = rFirst (f `rComp` g)
"first/f/pull" [1] forall f g h. (f `rComp` rFirst g) `rComp` rFirst h = f `rComp` rFirst (g `rComp` h)
#-}
rLeft :: Rule m a b1 -> Rule m (Either a b2) (Either b1 b2)
rLeft r0 = go r0 where
go (Rule r) = Rule \s e k -> case e of
Left a -> r s a \s' b r' -> k s' (Left b) (go r')
Right c -> k s (Right c) (go r0)
{-# INLINABLE[0] rLeft #-}
{-# RULES
"left/id" rLeft rId = rId
"left/arr" forall f. rLeft (rArr f) = rArr (left f)
"left/arrM" forall f. rLeft (rArrM f) = rArrM (runKleisli (left (Kleisli f)))
"left/push" [~1] forall f g. rLeft (f `rComp` g) = rLeft f `rComp` rLeft g
"left/pull" [1] forall f g. rLeft f `rComp` rLeft g = rLeft (f `rComp` g)
"left/f/pull" [1] forall f g h. (f `rComp` rLeft g) `rComp` rLeft h = f `rComp` rLeft (g `rComp` h)
#-}
rPure :: b -> Rule m a b
rPure a = Rule \s _ k -> k s a (rPure a)
{-# INLINABLE[0] rPure #-}
{-# RULES "pure/push" [~1] rPure = rArr . const #-} -- see Note [Desugaring derived operations]
rSecond :: Rule m a1 b -> Rule m (a2, a1) (a2, b)
rSecond (Rule r) = Rule \s (c, a) k -> r s a \s' b r' -> k s' (c, b) (rSecond r')
{-# INLINABLE[0] rSecond #-}
-- see Note [Desugaring derived operations]
{-# RULES "second/push" [~1] forall f. rSecond f = rArr swap . rFirst f . rArr swap #-}
swapEither :: Either a b -> Either b a
swapEither = either Right Left
{-# INLINE[0] swapEither #-}
rRight :: Rule m a1 b -> Rule m (Either a2 a1) (Either a2 b)
rRight r0 = go r0 where
go (Rule r) = Rule \s e k -> case e of
Left c -> k s (Left c) (go r0)
Right a -> r s a \s' b r' -> k s' (Right b) (go r')
{-# INLINABLE[0] rRight #-}
-- see Note [Desugaring derived operations]
{-# RULES "right/push" [~1] forall f. rRight f = rArr swapEither . rLeft f . rArr swapEither #-}
rSplit :: Rule m a1 b1 -> Rule m a2 b2 -> Rule m (a1, a2) (b1, b2)
Rule f `rSplit` Rule g =
Rule \s (a, b) k -> f s a \s' c f' -> g s' b \s'' d g' -> k s'' (c, d) (f' `rSplit` g')
{-# INLINABLE[0] rSplit #-}
-- see Note [Desugaring derived operations]
{-# RULES "***/push" [~1] forall f g. f `rSplit` g = rSecond g . rFirst f #-}
rFanout :: Rule m a b1 -> Rule m a b2 -> Rule m a (b1, b2)
Rule f `rFanout` Rule g =
Rule \s a k -> f s a \s' b f' -> g s' a \s'' c g' -> k s'' (b, c) (f' `rFanout` g')
{-# INLINABLE[0] rFanout #-}
-- see Note [Desugaring derived operations]
{-# RULES "&&&/push" [~1] forall f g. f `rFanout` g = (f *** g) . rArr (\a -> (a, a)) #-}
rFork :: Rule m a1 b1 -> Rule m a2 b2 -> Rule m (Either a1 a2) (Either b1 b2)
f0 `rFork` g0 = go f0 g0 where
go (Rule f) (Rule g) = Rule \s e k -> case e of
Left a -> f s a \s' b f' -> k s' (Left b) (go f' g0)
Right a -> g s a \s' b g' -> k s' (Right b) (go f0 g')
{-# INLINABLE[0] rFork #-}
-- see Note [Desugaring derived operations]
{-# RULES "+++/push" [~1] forall f g. f `rFork` g = rRight g . rLeft f #-}
fromEither :: Either a a -> a
fromEither = either id id
{-# INLINE[0] fromEither #-}
rFanin :: Rule m a1 b -> Rule m a2 b -> Rule m (Either a1 a2) b
f0 `rFanin` g0 = go f0 g0 where
go (Rule f) (Rule g) = Rule \s e k -> case e of
Left a -> f s a \s' b f' -> k s' b (go f' g0)
Right a -> g s a \s' b g' -> k s' b (go f0 g')
{-# INLINABLE[0] rFanin #-}
-- see Note [Desugaring derived operations]
{-# RULES "|||/push" [~1] forall f g. f `rFanin` g = rArr fromEither . (f +++ g) #-}
instance Functor (Rule m a) where
fmap f r = arr f . r
{-# INLINE fmap #-}
instance Applicative (Rule m a) where
pure = rPure
{-# INLINE pure #-}
(<*>) = liftA2 ($)
{-# INLINE (<*>) #-}
liftA2 f g h = arr (uncurry f) . (g &&& h)
{-# INLINE liftA2 #-}
instance Profunctor (Rule m) where
dimap f g r = arr g . r . arr f
{-# INLINE dimap #-}
lmap f r = r . arr f
{-# INLINE lmap #-}
rmap = fmap
{-# INLINE rmap #-}
instance Strong (Rule m) where
first' = rFirst
{-# INLINE first' #-}
second' = rSecond
{-# INLINE second' #-}
instance Choice (Rule m) where
left' = rLeft
{-# INLINE left' #-}
right' = rRight
{-# INLINE right' #-}
instance Category (Rule m) where
id = rId
{-# INLINE id #-}
(.) = rComp
{-# INLINE (.) #-}
instance Arrow (Rule m) where
arr = rArr
{-# INLINE arr #-}
first = rFirst
{-# INLINE first #-}
second = rSecond
{-# INLINE second #-}
(***) = rSplit
{-# INLINE (***) #-}
(&&&) = rFanout
{-# INLINE (&&&) #-}
instance ArrowChoice (Rule m) where
left = rLeft
{-# INLINE left #-}
right = rRight
{-# INLINE right #-}
(+++) = rFork
{-# INLINE (+++) #-}
(|||) = rFanin
{-# INLINE (|||) #-}
instance (Monad m) => ArrowKleisli m (Rule m) where
arrM = rArrM
{-# INLINE arrM #-}
class (Arrow arr) => ArrowDistribute arr where
-- | Distributes an arrow that operates on key-value pairs, over a 'HM.HashMap' in an
-- order-independent way.
--
-- This is intended to be used as a control operator in @proc@ notation; see
-- Note [Weird control operator types] in "Control.Arrow.Extended".
keyed
:: (Eq k, Hashable k)
=> arr (e, (k, (a, s))) b
-> arr (e, (HashMap k a, s)) (HashMap k b)
instance (Monoid w, ArrowDistribute arr) => ArrowDistribute (WriterA w arr) where
keyed (WriterA f) = WriterA (arr (swap . sequence . fmap swap) . keyed f)
{-# INLINE keyed #-}
-- | Unlike 'traverseA', using 'keyed' preserves incrementalization: if the input rule is
-- incremental in its argument, the resulting rule will be incremental as well for any entries in
-- the map that do not change between builds.
instance ArrowDistribute (Rule m) where
keyed
:: forall a b k e s
. (Eq k, Hashable k)
=> Rule m (e, (k, (a, s))) b
-> Rule m (e, (HashMap k a, s)) (HashMap k b)
keyed r0 = keyedWith HM.empty
where
keyedWith
:: HashMap k (Rule m (e, (k, (a, s))) b)
-> Rule m (e, (HashMap k a, s)) (HashMap k b)
keyedWith !rs = Rule \s (e, (vs, sk)) c ->
HM.foldrWithKey (process rs e sk) (finish c) vs s HM.empty HM.empty
process
:: HashMap k (Rule m (e, (k, (a, s))) b)
-> e -> s -> k -> a
-> (Accesses -> HashMap k b -> HashMap k (Rule m (e, (k, (a, s))) b) -> m r)
-> Accesses -> HashMap k b -> HashMap k (Rule m (e, (k, (a, s))) b) -> m r
process rs e sk k a c s !vs' !rs' =
let Rule r = HM.lookupDefault r0 k rs
in r s (e, (k, (a, sk))) \s' b r' -> c s' (HM.insert k b vs') (HM.insert k r' rs')
finish
:: (Accesses -> HashMap k b -> Rule m (e, (HashMap k a, s)) (HashMap k b) -> m r)
-> Accesses -> HashMap k b -> HashMap k (Rule m (e, (k, (a, s))) b) -> m r
finish c s !vs' !rs' = c s vs' (keyedWith rs')
{-# INLINABLE keyed #-}
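A minimal sketch of combining `keyed` with `cache` from Hasura.Incremental.Internal.Cache; all type variables are generic placeholders, and the constraints just make the tupled input cacheable:

import Data.Hashable (Hashable)
import Data.HashMap.Strict (HashMap)

import Control.Monad.Unique (MonadUnique)
import Hasura.Incremental (Cacheable, Rule, cache, keyed)

-- Each map entry gets its own cached sub-rule, so only entries whose inputs
-- change are re-executed on the next build.
cachedKeyed
  :: (MonadUnique m, Eq k, Hashable k, Cacheable e, Cacheable k, Cacheable a, Cacheable s)
  => Rule m (e, (k, (a, s))) b
  -> Rule m (e, (HashMap k a, s)) (HashMap k b)
cachedKeyed perKey = keyed (cache perKey)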

View File

@ -0,0 +1,103 @@
{-# OPTIONS_HADDOCK not-home #-}
{-# LANGUAGE GADTs #-}
{-# LANGUAGE RoleAnnotations #-}
module Hasura.Incremental.Select
( Select(..)
, ConstS(..)
, selectKey
, UniqueS
, newUniqueS
, DMapS(..)
) where
import Hasura.Prelude
import qualified Data.Dependent.Map as DM
import qualified Data.HashMap.Strict as M
import Control.Monad.Unique
import Data.GADT.Compare
import Data.Kind
import Unsafe.Coerce (unsafeCoerce)
-- | The 'Select' class provides a way to access subparts of a product type using a reified
-- 'Selector'. A @'Selector' a b@ is essentially a function from @a@ to @b@, and indeed 'select'
-- converts a 'Selector' to such a function. However, unlike functions, 'Selector's can be compared
-- for equality using 'GEq' and ordered using 'GCompare'.
--
-- This is useful to implement dependency tracking, since it's possible to track in a reified form
-- exactly which parts of a data structure are used.
class (GCompare (Selector a)) => Select a where
type Selector a :: Type -> Type
select :: Selector a b -> a -> b
instance (Eq k, Ord k, Hashable k) => Select (HashMap k v) where
type Selector (HashMap k v) = ConstS k (Maybe v)
select (ConstS k) = M.lookup k
instance (GCompare k) => Select (DM.DMap k f) where
type Selector (DM.DMap k f) = DMapS k f
select (DMapS k) = DM.lookup k
-- | The constant selector, which is useful for representing selectors into data structures where
-- all fields have the same type. Matching on a value of type @'ConstS' k a b@ causes @a@ and @b@ to
-- unify, effectively “pinning” @b@ to @a@.
data ConstS k a b where
ConstS :: !k -> ConstS k a a
selectKey :: (Select a, Selector a ~ ConstS k v) => k -> a -> v
selectKey = select . ConstS
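A small sketch of the HashMap instance in action; the key "users" is illustrative:

import qualified Data.HashMap.Strict as M

import Hasura.Incremental.Select (selectKey)

-- A reified lookup: equivalent to `M.lookup "users"`, but built from a selector
-- that can also be compared and ordered via GEq/GCompare.
lookupUsers :: M.HashMap String Int -> Maybe Int
lookupUsers = selectKey "users"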
instance (Eq k) => GEq (ConstS k a) where
ConstS a `geq` ConstS b
| a == b = Just Refl
| otherwise = Nothing
instance (Ord k) => GCompare (ConstS k a) where
ConstS a `gcompare` ConstS b = case compare a b of
LT -> GLT
EQ -> GEQ
GT -> GGT
-- | A 'UniqueS' is, as the name implies, a globally-unique 'Selector', which can be created using
-- 'newUniqueS'. If a value of type @'UniqueS' a@ is found to be equal (via 'geq') with another
-- value of type @'UniqueS' b@, then @a@ and @b@ must be the same type. This effectively allows the
-- creation of a dynamically-extensible sum type, where new constructors can be created at runtime
-- using 'newUniqueS'.
type role UniqueS nominal
newtype UniqueS a = UniqueS Unique
deriving (Eq)
newUniqueS :: (MonadUnique m) => m (UniqueS a)
newUniqueS = UniqueS <$> newUnique
{-# INLINE newUniqueS #-}
instance GEq UniqueS where
UniqueS a `geq` UniqueS b
-- This use of `unsafeCoerce` is safe as long as we don't export the constructor of `UniqueS`.
-- Because a `UniqueS` is, in fact, unique, we can be certain that equality of 'UniqueS's
-- implies equality of their argument types.
| a == b = Just (unsafeCoerce Refl)
| otherwise = Nothing
instance GCompare UniqueS where
UniqueS a `gcompare` UniqueS b = case compare a b of
LT -> GLT
-- See note about `unsafeCoerce` above.
EQ -> unsafeCoerce GEQ
GT -> GGT
data DMapS k f a where
DMapS :: !(k a) -> DMapS k f (Maybe (f a))
instance (GEq k) => GEq (DMapS k f) where
DMapS a `geq` DMapS b = case a `geq` b of
Just Refl -> Just Refl
Nothing -> Nothing
instance (GCompare k) => GCompare (DMapS k f) where
DMapS a `gcompare` DMapS b = case a `gcompare` b of
GLT -> GLT
GEQ -> GEQ
GGT -> GGT

View File

@ -23,7 +23,7 @@ import Control.Monad.Fail as M (MonadFail)
import Control.Monad.Identity as M
import Control.Monad.Reader as M
import Control.Monad.State.Strict as M
import Control.Monad.Writer.Strict as M
import Control.Monad.Writer.Strict as M (MonadWriter (..), WriterT (..))
import Data.Align as M (Align (align, alignWith))
import Data.Align.Key as M (AlignWithKey (..))
import Data.Bool as M (bool)

View File

@ -15,6 +15,7 @@ module Hasura.RQL.DDL.ComputedField
import Hasura.Prelude
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.Deps
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Schema.Function (RawFunctionInfo (..), mkFunctionArgs)
@ -39,6 +40,7 @@ data ComputedFieldDefinition
, _cfdTableArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Lift, Generic)
instance NFData ComputedFieldDefinition
instance Cacheable ComputedFieldDefinition
$(deriveJSON (aesonDrop 4 snakeCase) ''ComputedFieldDefinition)
data AddComputedField
@ -49,6 +51,7 @@ data AddComputedField
, _afcComment :: !(Maybe Text)
} deriving (Show, Eq, Lift, Generic)
instance NFData AddComputedField
instance Cacheable AddComputedField
$(deriveJSON (aesonDrop 4 snakeCase) ''AddComputedField)
runAddComputedField :: (MonadTx m, CacheRWM m) => AddComputedField -> m EncJSON

View File

@ -1,6 +1,7 @@
module Hasura.RQL.DDL.Headers where
import Data.Aeson
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Error
@ -14,6 +15,7 @@ data HeaderConf = HeaderConf HeaderName HeaderValue
deriving (Show, Eq, Lift, Generic)
instance NFData HeaderConf
instance Hashable HeaderConf
instance Cacheable HeaderConf
type HeaderName = T.Text
@ -21,6 +23,7 @@ data HeaderValue = HVValue T.Text | HVEnv T.Text
deriving (Show, Eq, Lift, Generic)
instance NFData HeaderValue
instance Hashable HeaderValue
instance Cacheable HeaderValue
instance FromJSON HeaderConf where
parseJSON (Object o) = do

View File

@ -48,6 +48,7 @@ module Hasura.RQL.DDL.Permission
) where
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.DDL.Permission.Internal
import Hasura.RQL.DDL.Permission.Triggers
@ -76,7 +77,7 @@ data InsPerm
, ipSet :: !(Maybe (ColumnValues Value))
, ipColumns :: !(Maybe PermColSpec)
} deriving (Show, Eq, Lift, Generic)
instance Cacheable InsPerm
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''InsPerm)
type InsPermDef = PermDef InsPerm
@ -208,6 +209,7 @@ data SelPerm
, spAllowAggregations :: !Bool -- ^ Allow aggregation
, spComputedFields :: ![ComputedFieldName] -- ^ Allowed computed fields
} deriving (Show, Eq, Lift, Generic)
instance Cacheable SelPerm
$(deriveToJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''SelPerm)
instance FromJSON SelPerm where
@ -295,7 +297,7 @@ data UpdPerm
, ucSet :: !(Maybe (ColumnValues Value)) -- Preset columns
, ucFilter :: !BoolExp -- Filter expression
} deriving (Show, Eq, Lift, Generic)
instance Cacheable UpdPerm
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''UpdPerm)
type UpdPermDef = PermDef UpdPerm
@ -358,7 +360,7 @@ instance IsPerm UpdPerm where
data DelPerm
= DelPerm { dcFilter :: !BoolExp }
deriving (Show, Eq, Lift, Generic)
instance Cacheable DelPerm
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''DelPerm)
type DelPermDef = PermDef DelPerm

View File

@ -18,6 +18,7 @@ import qualified Data.Text.Extended as T
import qualified Hasura.SQL.DML as S
import Hasura.EncJSON
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.GBoolExp
import Hasura.RQL.Types
@ -31,6 +32,7 @@ data PermColSpec
= PCStar
| PCCols ![PGCol]
deriving (Show, Eq, Lift, Generic)
instance Cacheable PermColSpec
instance FromJSON PermColSpec where
parseJSON (String "*") = return PCStar
@ -156,7 +158,7 @@ data PermDef a =
, pdPermission :: !a
, pdComment :: !(Maybe T.Text)
} deriving (Show, Eq, Lift, Generic)
instance (Cacheable a) => Cacheable (PermDef a)
$(deriveFromJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''PermDef)
instance (ToJSON a) => ToJSON (PermDef a) where

View File

@ -57,7 +57,7 @@ import Hasura.SQL.Types
import Debug.Trace
buildRebuildableSchemaCache
:: (MonadIO m, MonadTx m, HasHttpManager m, HasSQLGenCtx m)
:: (MonadIO m, MonadUnique m, MonadTx m, HasHttpManager m, HasSQLGenCtx m)
=> m (RebuildableSchemaCache m)
buildRebuildableSchemaCache = do
catalogMetadata <- liftTx fetchCatalogData

View File

@ -11,6 +11,7 @@ import Control.Arrow.Extended
import Control.Lens hiding ((.=))
import Data.Aeson
import Data.List (nub)
import Data.Monoid (First)
import Hasura.RQL.DDL.Schema.Cache.Common
import Hasura.RQL.Types

View File

@ -82,7 +82,7 @@ withPermission f = proc (e, (permission, s)) -> do
buildPermission
:: ( ArrowChoice arr, Inc.ArrowCache arr, ArrowKleisli m arr
, ArrowWriter (Seq CollectedInfo) arr, MonadTx m, MonadReader BuildReason m
, Eq a, IsPerm a, FromJSON a, Eq (PermInfo a) )
, Inc.Cacheable a, IsPerm a, FromJSON a, Inc.Cacheable (PermInfo a) )
=> ( TableCoreCache
, TableCoreInfo
, [CatalogPermission]
@ -109,7 +109,7 @@ buildPermission = Inc.cache proc (tableCache, tableInfo, permissions) -> do
rebuildViewsIfNeeded
:: ( Inc.ArrowCache arr, ArrowKleisli m arr, MonadTx m, MonadReader BuildReason m
, Eq a, IsPerm a, Eq (PermInfo a) )
, Inc.Cacheable a, IsPerm a, Inc.Cacheable (PermInfo a) )
=> (QualifiedTable, PermDef a, PermInfo a) `arr` ()
rebuildViewsIfNeeded = Inc.cache $ arrM \(tableName, permDef, info) -> do
liftTx . liftIO $ traceEventIO "START permissions/build/views"

View File

@ -6,6 +6,7 @@ module Hasura.RQL.DDL.Schema.Function where
import Hasura.EncJSON
import Hasura.GraphQL.Utils (showNames)
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.Server.Utils (makeReasonMessage)
@ -41,6 +42,7 @@ data RawFunctionInfo
, rfiDescription :: !(Maybe PGDescription)
} deriving (Show, Eq, Generic)
instance NFData RawFunctionInfo
instance Cacheable RawFunctionInfo
$(deriveJSON (aesonDrop 3 snakeCase) ''RawFunctionInfo)
mkFunctionArgs :: Int -> [QualifiedPGType] -> [FunctionArgName] -> [FunctionArg]
@ -184,6 +186,7 @@ data FunctionConfig
{ _fcSessionArgument :: !(Maybe FunctionArgName)
} deriving (Show, Eq, Generic, Lift)
instance NFData FunctionConfig
instance Cacheable FunctionConfig
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields = True} ''FunctionConfig)
emptyFunctionConfig :: FunctionConfig

View File

@ -4,12 +4,14 @@ module Hasura.RQL.Instances where
import Hasura.Prelude
import Instances.TH.Lift ()
import qualified Language.Haskell.TH.Syntax as TH
import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Language.Haskell.TH.Syntax as TH
import Data.Functor.Product
import Data.GADT.Compare
import Instances.TH.Lift ()
instance NFData G.Argument
instance NFData G.Directive
@ -45,3 +47,19 @@ instance (TH.Lift k, TH.Lift v) => TH.Lift (M.HashMap k v) where
instance TH.Lift a => TH.Lift (S.HashSet a) where
lift s = [| S.fromList $(TH.lift $ S.toList s) |]
instance (GEq f, GEq g) => GEq (Product f g) where
Pair a1 a2 `geq` Pair b1 b2
| Just Refl <- a1 `geq` b1
, Just Refl <- a2 `geq` b2
= Just Refl
| otherwise = Nothing
instance (GCompare f, GCompare g) => GCompare (Product f g) where
Pair a1 a2 `gcompare` Pair b1 b2 = case gcompare a1 b1 of
GLT -> GLT
GEQ -> case gcompare a2 b2 of
GLT -> GLT
GEQ -> GEQ
GGT -> GGT
GGT -> GGT

View File

@ -36,6 +36,7 @@ module Hasura.RQL.Types.BoolExp
, PreSetColsPartial
) where
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
@ -61,6 +62,7 @@ data GExists a
} deriving (Show, Eq, Lift, Functor, Foldable, Traversable, Data, Generic)
instance (NFData a) => NFData (GExists a)
instance (Data a) => Plated (GExists a)
instance (Cacheable a) => Cacheable (GExists a)
gExistsToJSON :: (a -> (Text, Value)) -> GExists a -> Value
gExistsToJSON f (GExists qt wh) =
@ -86,6 +88,7 @@ data GBoolExp a
deriving (Show, Eq, Lift, Functor, Foldable, Traversable, Data, Generic)
instance (NFData a) => NFData (GBoolExp a)
instance (Data a) => Plated (GBoolExp a)
instance (Cacheable a) => Cacheable (GBoolExp a)
gBoolExpTrue :: GBoolExp a
gBoolExpTrue = BoolAnd []
@ -136,6 +139,7 @@ data DWithinGeomOp a =
, dwgeomFrom :: !a
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (DWithinGeomOp a)
instance (Cacheable a) => Cacheable (DWithinGeomOp a)
$(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeomOp)
data DWithinGeogOp a =
@ -145,6 +149,7 @@ data DWithinGeogOp a =
, dwgeogUseSpheroid :: !a
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (DWithinGeogOp a)
instance (Cacheable a) => Cacheable (DWithinGeogOp a)
$(deriveJSON (aesonDrop 6 snakeCase) ''DWithinGeogOp)
data STIntersectsNbandGeommin a =
@ -153,6 +158,7 @@ data STIntersectsNbandGeommin a =
, singGeommin :: !a
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (STIntersectsNbandGeommin a)
instance (Cacheable a) => Cacheable (STIntersectsNbandGeommin a)
$(deriveJSON (aesonDrop 4 snakeCase) ''STIntersectsNbandGeommin)
data STIntersectsGeomminNband a =
@ -161,6 +167,7 @@ data STIntersectsGeomminNband a =
, signNband :: !(Maybe a)
} deriving (Show, Eq, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (STIntersectsGeomminNband a)
instance (Cacheable a) => Cacheable (STIntersectsGeomminNband a)
$(deriveJSON (aesonDrop 4 snakeCase) ''STIntersectsGeomminNband)
type CastExp a = M.HashMap PGScalarType [OpExpG a]
@ -219,6 +226,7 @@ data OpExpG a
| CLTE !PGCol
deriving (Eq, Show, Functor, Foldable, Traversable, Generic, Data)
instance (NFData a) => NFData (OpExpG a)
instance (Cacheable a) => Cacheable (OpExpG a)
opExpDepCol :: OpExpG a -> Maybe PGCol
opExpDepCol = \case
@ -291,6 +299,7 @@ data AnnBoolExpFld a
| AVRel !RelInfo !(AnnBoolExp a)
deriving (Show, Eq, Functor, Foldable, Traversable, Generic)
instance (NFData a) => NFData (AnnBoolExpFld a)
instance (Cacheable a) => Cacheable (AnnBoolExpFld a)
type AnnBoolExp a
= GBoolExp (AnnBoolExpFld a)
@ -336,6 +345,7 @@ data PartialSQLExp
| PSESQLExp !S.SQLExp
deriving (Show, Eq, Generic, Data)
instance NFData PartialSQLExp
instance Cacheable PartialSQLExp
mkTypedSessionVar :: PGType PGColumnType -> SessVar -> PartialSQLExp
mkTypedSessionVar columnType =

View File

@ -22,6 +22,7 @@ import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.ComputedField
import Hasura.RQL.DDL.Schema.Function
import Hasura.RQL.Types.Column
@ -36,7 +37,7 @@ import Hasura.SQL.Types
newtype CatalogForeignKey
= CatalogForeignKey
{ unCatalogForeignKey :: ForeignKey
} deriving (Show, Eq, NFData, Hashable)
} deriving (Show, Eq, NFData, Hashable, Cacheable)
instance FromJSON CatalogForeignKey where
parseJSON = withObject "CatalogForeignKey" \o -> do
@ -66,6 +67,7 @@ data CatalogTableInfo
, _ctiDescription :: !(Maybe PGDescription)
} deriving (Show, Eq, Generic)
instance NFData CatalogTableInfo
instance Cacheable CatalogTableInfo
$(deriveFromJSON (aesonDrop 4 snakeCase) ''CatalogTableInfo)
data CatalogTable
@ -77,6 +79,7 @@ data CatalogTable
, _ctInfo :: !(Maybe CatalogTableInfo)
} deriving (Show, Eq, Generic)
instance NFData CatalogTable
instance Cacheable CatalogTable
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogTable)
data CatalogRelation
@ -88,6 +91,7 @@ data CatalogRelation
, _crComment :: !(Maybe Text)
} deriving (Show, Eq, Generic)
instance NFData CatalogRelation
instance Cacheable CatalogRelation
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogRelation)
data CatalogPermission
@ -100,6 +104,7 @@ data CatalogPermission
} deriving (Show, Eq, Generic)
instance NFData CatalogPermission
instance Hashable CatalogPermission
instance Cacheable CatalogPermission
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogPermission)
data CatalogComputedField
@ -108,6 +113,7 @@ data CatalogComputedField
, _cccFunctionInfo :: ![RawFunctionInfo] -- ^ multiple functions with same name
} deriving (Show, Eq, Generic)
instance NFData CatalogComputedField
instance Cacheable CatalogComputedField
$(deriveFromJSON (aesonDrop 4 snakeCase) ''CatalogComputedField)
data CatalogEventTrigger
@ -117,6 +123,7 @@ data CatalogEventTrigger
, _cetDef :: !Value
} deriving (Show, Eq, Generic)
instance NFData CatalogEventTrigger
instance Cacheable CatalogEventTrigger
$(deriveFromJSON (aesonDrop 4 snakeCase) ''CatalogEventTrigger)
data CatalogFunction
@ -127,6 +134,7 @@ data CatalogFunction
, _cfInfo :: ![RawFunctionInfo] -- ^ multiple functions with same name
} deriving (Show, Eq, Generic)
instance NFData CatalogFunction
instance Cacheable CatalogFunction
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogFunction)
data CatalogMetadata
@ -141,4 +149,5 @@ data CatalogMetadata
, _cmComputedFields :: ![CatalogComputedField]
} deriving (Show, Eq, Generic)
instance NFData CatalogMetadata
instance Cacheable CatalogMetadata
$(deriveFromJSON (aesonDrop 3 snakeCase) ''CatalogMetadata)
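
Two instance shapes recur throughout the files above and below: record and sum types derive Generic and then declare an empty Cacheable instance, while newtypes simply add Cacheable to their deriving list. Below is a minimal standalone sketch of both shapes, assuming Cacheable (exported from Hasura.Incremental) supplies Generic-based defaults for its methods and already has instances for Text and Int; the Widget and WidgetName types are hypothetical, for illustration only.

{-# LANGUAGE DeriveGeneric              #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}

import Data.Text (Text)
import GHC.Generics (Generic)
import Hasura.Incremental (Cacheable)

-- Hypothetical record, mirroring e.g. CatalogTableInfo above: derive Generic,
-- declare an empty instance, and rely on the class's (assumed) generic default.
data Widget
  = Widget
  { _wName  :: !Text
  , _wCount :: !Int
  } deriving (Show, Eq, Generic)
instance Cacheable Widget

-- Hypothetical newtype, mirroring e.g. CatalogForeignKey above: reuse the
-- wrapped type's instance via GeneralizedNewtypeDeriving.
newtype WidgetName = WidgetName { unWidgetName :: Text }
  deriving (Show, Eq, Cacheable)

The empty instance bodies are what keep the change mechanical to roll out across this many modules.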

View File

@ -35,6 +35,7 @@ import Data.Aeson.Casing
import Data.Aeson.TH
import Language.Haskell.TH.Syntax (Lift)
import Hasura.Incremental (Cacheable)
import Hasura.RQL.Instances ()
import Hasura.RQL.Types.Error
import Hasura.SQL.Types
@ -42,12 +43,12 @@ import Hasura.SQL.Value
newtype EnumValue
= EnumValue { getEnumValue :: T.Text }
deriving (Show, Eq, Lift, NFData, Hashable, ToJSON, ToJSONKey, FromJSON, FromJSONKey)
deriving (Show, Eq, Lift, NFData, Hashable, ToJSON, ToJSONKey, FromJSON, FromJSONKey, Cacheable)
newtype EnumValueInfo
= EnumValueInfo
{ evComment :: Maybe T.Text
} deriving (Show, Eq, Lift, NFData, Hashable)
} deriving (Show, Eq, Lift, NFData, Hashable, Cacheable)
$(deriveJSON (aesonDrop 2 snakeCase) ''EnumValueInfo)
type EnumValues = M.HashMap EnumValue EnumValueInfo
@ -61,6 +62,7 @@ data EnumReference
} deriving (Show, Eq, Generic, Lift)
instance NFData EnumReference
instance Hashable EnumReference
instance Cacheable EnumReference
$(deriveJSON (aesonDrop 2 snakeCase) ''EnumReference)
-- | The type we use for columns, which are currently always “scalars” (though see the note about
@ -77,6 +79,7 @@ data PGColumnType
deriving (Show, Eq, Generic)
instance NFData PGColumnType
instance Hashable PGColumnType
instance Cacheable PGColumnType
$(deriveToJSON defaultOptions{constructorTagModifier = drop 8} ''PGColumnType)
$(makePrisms ''PGColumnType)
@ -137,6 +140,7 @@ data PGRawColumnInfo
, prciDescription :: !(Maybe PGDescription)
} deriving (Show, Eq, Generic)
instance NFData PGRawColumnInfo
instance Cacheable PGRawColumnInfo
$(deriveJSON (aesonDrop 4 snakeCase) ''PGRawColumnInfo)
-- | “Resolved” column info, produced from a 'PGRawColumnInfo' value that has been combined with
@ -151,6 +155,7 @@ data PGColumnInfo
, pgiDescription :: !(Maybe PGDescription)
} deriving (Show, Eq, Generic)
instance NFData PGColumnInfo
instance Cacheable PGColumnInfo
$(deriveToJSON (aesonDrop 3 snakeCase) ''PGColumnInfo)
onlyIntCols :: [PGColumnInfo] -> [PGColumnInfo]

View File

@ -33,6 +33,7 @@ module Hasura.RQL.Types.Common
, isSystemDefined
) where
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.SQL.Types
@ -52,7 +53,7 @@ import qualified PostgreSQL.Binary.Decoding as PD
import qualified Test.QuickCheck as QC
newtype NonEmptyText = NonEmptyText {unNonEmptyText :: T.Text}
deriving (Show, Eq, Ord, Hashable, ToJSON, ToJSONKey, Lift, Q.ToPrepArg, DQuote, Generic, NFData)
deriving (Show, Eq, Ord, Hashable, ToJSON, ToJSONKey, Lift, Q.ToPrepArg, DQuote, Generic, NFData, Cacheable)
instance Arbitrary NonEmptyText where
arbitrary = NonEmptyText . T.pack <$> QC.listOf1 (QC.elements alphaNumerics)
@ -84,7 +85,7 @@ rootText = NonEmptyText "root"
newtype RelName
= RelName { getRelTxt :: NonEmptyText }
deriving (Show, Eq, Hashable, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Lift, Generic, Arbitrary, NFData)
deriving (Show, Eq, Hashable, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Lift, Generic, Arbitrary, NFData, Cacheable)
instance IsIden RelName where
toIden rn = Iden $ relNameToTxt rn
@ -108,6 +109,7 @@ data RelType
deriving (Show, Eq, Generic)
instance NFData RelType
instance Hashable RelType
instance Cacheable RelType
instance ToJSON RelType where
toJSON = String . relTypeToTxt
@ -132,11 +134,12 @@ data RelInfo
, riIsManual :: !Bool
} deriving (Show, Eq, Generic)
instance NFData RelInfo
instance Cacheable RelInfo
$(deriveToJSON (aesonDrop 2 snakeCase) ''RelInfo)
newtype FieldName
= FieldName { getFieldNameTxt :: T.Text }
deriving (Show, Eq, Ord, Hashable, FromJSON, ToJSON, FromJSONKey, ToJSONKey, Lift, Data, Generic, Arbitrary, NFData)
deriving (Show, Eq, Ord, Hashable, FromJSON, ToJSON, FromJSONKey, ToJSONKey, Lift, Data, Generic, Arbitrary, NFData, Cacheable)
instance IsIden FieldName where
toIden (FieldName f) = Iden f
@ -182,7 +185,7 @@ type ColMapping = HM.HashMap PGCol PGCol
-- | Postgres OIDs. <https://www.postgresql.org/docs/12/datatype-oid.html>
newtype OID = OID { unOID :: Int }
deriving (Show, Eq, NFData, Hashable, ToJSON, FromJSON, Q.FromCol)
deriving (Show, Eq, NFData, Hashable, ToJSON, FromJSON, Q.FromCol, Cacheable)
data Constraint
= Constraint
@ -191,6 +194,7 @@ data Constraint
} deriving (Show, Eq, Generic)
instance NFData Constraint
instance Hashable Constraint
instance Cacheable Constraint
$(deriveJSON (aesonDrop 2 snakeCase) ''Constraint)
data PrimaryKey a
@ -199,6 +203,7 @@ data PrimaryKey a
, _pkColumns :: !(NonEmpty a)
} deriving (Show, Eq, Generic)
instance (NFData a) => NFData (PrimaryKey a)
instance (Cacheable a) => Cacheable (PrimaryKey a)
$(makeLenses ''PrimaryKey)
$(deriveJSON (aesonDrop 3 snakeCase) ''PrimaryKey)
@ -210,12 +215,13 @@ data ForeignKey
} deriving (Show, Eq, Generic)
instance NFData ForeignKey
instance Hashable ForeignKey
instance Cacheable ForeignKey
$(deriveJSON (aesonDrop 3 snakeCase) ''ForeignKey)
type CustomColumnNames = HM.HashMap PGCol G.Name
newtype SystemDefined = SystemDefined { unSystemDefined :: Bool }
deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg, NFData)
deriving (Show, Eq, FromJSON, ToJSON, Q.ToPrepArg, NFData, Cacheable)
isSystemDefined :: SystemDefined -> Bool
isSystemDefined = unSystemDefined

View File

@ -4,6 +4,7 @@ Description: Schema cache types related to computed field
module Hasura.RQL.Types.ComputedField where
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Function
@ -21,7 +22,7 @@ import qualified Database.PG.Query as Q
newtype ComputedFieldName =
ComputedFieldName { unComputedFieldName :: NonEmptyText}
deriving (Show, Eq, NFData, Lift, FromJSON, ToJSON, Q.ToPrepArg, DQuote, Hashable, Q.FromCol, Generic, Arbitrary)
deriving (Show, Eq, NFData, Lift, FromJSON, ToJSON, Q.ToPrepArg, DQuote, Hashable, Q.FromCol, Generic, Arbitrary, Cacheable)
computedFieldNameToText :: ComputedFieldName -> Text
computedFieldNameToText = unNonEmptyText . unComputedFieldName
@ -36,7 +37,8 @@ data FunctionTableArgument
| FTANamed
!FunctionArgName -- ^ argument name
!Int -- ^ argument index
deriving (Show, Eq)
deriving (Show, Eq, Generic)
instance Cacheable FunctionTableArgument
instance ToJSON FunctionTableArgument where
toJSON FTAFirst = String "first_argument"
@ -45,7 +47,8 @@ instance ToJSON FunctionTableArgument where
data ComputedFieldReturn
= CFRScalar !PGScalarType
| CFRSetofTable !QualifiedTable
deriving (Show, Eq)
deriving (Show, Eq, Generic)
instance Cacheable ComputedFieldReturn
$(deriveToJSON defaultOptions { constructorTagModifier = snakeCase . drop 3
, sumEncoding = TaggedObject "type" "info"
}
@ -59,7 +62,8 @@ data ComputedFieldFunction
, _cffInputArgs :: !(Seq.Seq FunctionArg)
, _cffTableArgument :: !FunctionTableArgument
, _cffDescription :: !(Maybe PGDescription)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance Cacheable ComputedFieldFunction
$(deriveToJSON (aesonDrop 4 snakeCase) ''ComputedFieldFunction)
data ComputedFieldInfo
@ -68,7 +72,8 @@ data ComputedFieldInfo
, _cfiFunction :: !ComputedFieldFunction
, _cfiReturnType :: !ComputedFieldReturn
, _cfiComment :: !(Maybe Text)
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance Cacheable ComputedFieldInfo
$(deriveToJSON (aesonDrop 4 snakeCase) ''ComputedFieldInfo)
$(makeLenses ''ComputedFieldInfo)

View File

@ -40,6 +40,7 @@ module Hasura.RQL.Types.DML
import qualified Hasura.SQL.DML as S
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types.BoolExp
import Hasura.RQL.Types.Common
@ -63,9 +64,11 @@ data ColExp
{ ceCol :: !FieldName
, ceVal :: !Value
} deriving (Show, Eq, Lift, Data, Generic)
instance Cacheable ColExp
newtype BoolExp
= BoolExp { unBoolExp :: GBoolExp ColExp } deriving (Show, Eq, Lift, Generic)
= BoolExp { unBoolExp :: GBoolExp ColExp }
deriving (Show, Eq, Lift, Generic, Cacheable)
$(makeWrapped ''BoolExp)

View File

@ -26,6 +26,7 @@ module Hasura.RQL.Types.EventTrigger
import Data.Aeson
import Data.Aeson.Casing
import Data.Aeson.TH
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.DDL.Headers
import Hasura.RQL.Types.Common (NonEmptyText (..))
@ -38,7 +39,7 @@ import qualified Database.PG.Query as Q
import qualified Text.Regex.TDFA as TDFA
newtype TriggerName = TriggerName { unTriggerName :: NonEmptyText }
deriving (Show, Eq, Hashable, Lift, DQuote, FromJSON, ToJSON, ToJSONKey, Q.FromCol, Q.ToPrepArg, Generic, Arbitrary, NFData)
deriving (Show, Eq, Hashable, Lift, DQuote, FromJSON, ToJSON, ToJSONKey, Q.FromCol, Q.ToPrepArg, Generic, Arbitrary, NFData, Cacheable)
triggerNameToTxt :: TriggerName -> Text
triggerNameToTxt = unNonEmptyText . unTriggerName
@ -50,6 +51,7 @@ data Ops = INSERT | UPDATE | DELETE | MANUAL deriving (Show)
data SubscribeColumns = SubCStar | SubCArray [PGCol]
deriving (Show, Eq, Generic, Lift)
instance NFData SubscribeColumns
instance Cacheable SubscribeColumns
instance FromJSON SubscribeColumns where
parseJSON (String s) = case s of
@ -68,6 +70,7 @@ data SubscribeOpSpec
, sosPayload :: !(Maybe SubscribeColumns)
} deriving (Show, Eq, Generic, Lift)
instance NFData SubscribeOpSpec
instance Cacheable SubscribeOpSpec
$(deriveJSON (aesonDrop 3 snakeCase){omitNothingFields=True} ''SubscribeOpSpec)
defaultNumRetries :: Int
@ -177,6 +180,7 @@ data TriggerOpsDef
, tdEnableManual :: !(Maybe Bool)
} deriving (Show, Eq, Generic, Lift)
instance NFData TriggerOpsDef
instance Cacheable TriggerOpsDef
$(deriveJSON (aesonDrop 2 snakeCase){omitNothingFields=True} ''TriggerOpsDef)
data DeleteEventTriggerQuery

View File

@ -1,5 +1,6 @@
module Hasura.RQL.Types.Function where
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types.Common
import Hasura.SQL.Types
@ -19,6 +20,7 @@ data FunctionType
| FTSTABLE
deriving (Eq, Generic)
instance NFData FunctionType
instance Cacheable FunctionType
$(deriveJSON defaultOptions{constructorTagModifier = drop 2} ''FunctionType)
funcTypToTxt :: FunctionType -> T.Text
@ -31,17 +33,18 @@ instance Show FunctionType where
newtype FunctionArgName =
FunctionArgName { getFuncArgNameTxt :: T.Text}
deriving (Show, Eq, NFData, ToJSON, FromJSON, Lift, DQuote, IsString, Generic, Arbitrary)
deriving (Show, Eq, NFData, ToJSON, FromJSON, Lift, DQuote, IsString, Generic, Arbitrary, Cacheable)
newtype HasDefault = HasDefault { unHasDefault :: Bool }
deriving (Show, Eq, ToJSON)
deriving (Show, Eq, ToJSON, Cacheable)
data FunctionArg
= FunctionArg
{ faName :: !(Maybe FunctionArgName)
, faType :: !QualifiedPGType
, faHasDefault :: !HasDefault
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance Cacheable FunctionArg
$(deriveToJSON (aesonDrop 2 snakeCase) ''FunctionArg)
data InputArgument a

View File

@ -23,6 +23,7 @@ module Hasura.RQL.Types.Permission
, PermId(..)
) where
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types.Common (NonEmptyText, adminText, mkNonEmptyText,
unNonEmptyText)
@ -44,7 +45,7 @@ import qualified PostgreSQL.Binary.Decoding as PD
newtype RoleName
= RoleName {getRoleTxt :: NonEmptyText}
deriving ( Show, Eq, Ord, Hashable, FromJSONKey, ToJSONKey, FromJSON
, ToJSON, Q.FromCol, Q.ToPrepArg, Lift, Generic, Arbitrary, NFData )
, ToJSON, Q.FromCol, Q.ToPrepArg, Lift, Generic, Arbitrary, NFData, Cacheable )
instance DQuote RoleName where
dquoteTxt = roleNameToTxt
@ -122,6 +123,7 @@ data PermType
| PTDelete
deriving (Eq, Lift, Generic)
instance NFData PermType
instance Cacheable PermType
instance Q.FromCol PermType where
fromCol bs = flip Q.fromColHelper bs $ PD.enum $ \case

View File

@ -1,6 +1,7 @@
module Hasura.RQL.Types.QueryCollection where
import Hasura.GraphQL.Validate.Types (stripTypenames)
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types.Common (NonEmptyText)
import Hasura.SQL.Types
@ -24,15 +25,15 @@ newtype CollectionName
newtype QueryName
= QueryName {unQueryName :: NonEmptyText}
deriving (Show, Eq, Ord, NFData, Hashable, Lift, ToJSON, ToJSONKey, FromJSON, DQuote, Generic, Arbitrary)
deriving (Show, Eq, Ord, NFData, Hashable, Lift, ToJSON, ToJSONKey, FromJSON, DQuote, Generic, Arbitrary, Cacheable)
newtype GQLQuery
= GQLQuery {unGQLQuery :: G.ExecutableDocument}
deriving (Show, Eq, NFData, Hashable, Lift, ToJSON, FromJSON)
deriving (Show, Eq, NFData, Hashable, Lift, ToJSON, FromJSON, Cacheable)
newtype GQLQueryWithText
= GQLQueryWithText (T.Text, GQLQuery)
deriving (Show, Eq, NFData, Lift, Generic)
deriving (Show, Eq, NFData, Lift, Generic, Cacheable)
instance FromJSON GQLQueryWithText where
parseJSON v@(String t) = GQLQueryWithText . (t, ) <$> parseJSON v
@ -55,6 +56,7 @@ data ListedQuery
, _lqQuery :: !GQLQueryWithText
} deriving (Show, Eq, Lift, Generic)
instance NFData ListedQuery
instance Cacheable ListedQuery
$(deriveJSON (aesonDrop 3 snakeCase) ''ListedQuery)
type QueryList = [ListedQuery]
@ -62,7 +64,7 @@ type QueryList = [ListedQuery]
newtype CollectionDef
= CollectionDef
{ _cdQueries :: QueryList }
deriving (Show, Eq, Lift, Generic, NFData)
deriving (Show, Eq, Lift, Generic, NFData, Cacheable)
$(deriveJSON (aesonDrop 3 snakeCase) ''CollectionDef)
data CreateCollection

View File

@ -12,6 +12,7 @@ import qualified Data.Text as T
import qualified Database.PG.Query as Q
import qualified Network.URI.Extended as N
import Hasura.Incremental (Cacheable)
import Hasura.RQL.DDL.Headers (HeaderConf (..))
import Hasura.RQL.Types.Error
import Hasura.SQL.Types (DQuote)
@ -23,7 +24,7 @@ newtype RemoteSchemaName
{ unRemoteSchemaName :: NonEmptyText }
deriving ( Show, Eq, Lift, Hashable, J.ToJSON, J.ToJSONKey
, J.FromJSON, Q.ToPrepArg, Q.FromCol, DQuote, NFData
, Generic, Arbitrary
, Generic, Cacheable, Arbitrary
)
data RemoteSchemaInfo
@ -47,6 +48,7 @@ data RemoteSchemaDef
, _rsdTimeoutSeconds :: !(Maybe Int)
} deriving (Show, Eq, Lift, Generic)
instance NFData RemoteSchemaDef
instance Cacheable RemoteSchemaDef
$(J.deriveToJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''RemoteSchemaDef)
instance J.FromJSON RemoteSchemaDef where
@ -65,6 +67,7 @@ data AddRemoteSchemaQuery
, _arsqComment :: !(Maybe Text)
} deriving (Show, Eq, Lift, Generic)
instance NFData AddRemoteSchemaQuery
instance Cacheable AddRemoteSchemaQuery
$(J.deriveJSON (J.aesonDrop 5 J.snakeCase) ''AddRemoteSchemaQuery)
newtype RemoteSchemaNameQuery

View File

@ -113,6 +113,7 @@ module Hasura.RQL.Types.SchemaCache
import qualified Hasura.GraphQL.Context as GC
import Hasura.Db
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.RQL.Types.BoolExp
import Hasura.RQL.Types.Column
@ -160,7 +161,8 @@ data FieldInfo
= FIColumn !PGColumnInfo
| FIRelationship !RelInfo
| FIComputedField !ComputedFieldInfo
deriving (Show, Eq)
deriving (Show, Eq, Generic)
instance Cacheable FieldInfo
$(deriveToJSON
defaultOptions { constructorTagModifier = snakeCase . drop 2
, sumEncoding = TaggedObject "type" "detail"
@ -211,6 +213,7 @@ data InsPermInfo
, ipiRequiredHeaders :: ![T.Text]
} deriving (Show, Eq, Generic)
instance NFData InsPermInfo
instance Cacheable InsPermInfo
$(deriveToJSON (aesonDrop 3 snakeCase) ''InsPermInfo)
data SelPermInfo
@ -224,6 +227,7 @@ data SelPermInfo
, spiRequiredHeaders :: ![T.Text]
} deriving (Show, Eq, Generic)
instance NFData SelPermInfo
instance Cacheable SelPermInfo
$(deriveToJSON (aesonDrop 3 snakeCase) ''SelPermInfo)
data UpdPermInfo
@ -235,6 +239,7 @@ data UpdPermInfo
, upiRequiredHeaders :: ![T.Text]
} deriving (Show, Eq, Generic)
instance NFData UpdPermInfo
instance Cacheable UpdPermInfo
$(deriveToJSON (aesonDrop 3 snakeCase) ''UpdPermInfo)
data DelPermInfo
@ -244,6 +249,7 @@ data DelPermInfo
, dpiRequiredHeaders :: ![T.Text]
} deriving (Show, Eq, Generic)
instance NFData DelPermInfo
instance Cacheable DelPermInfo
$(deriveToJSON (aesonDrop 3 snakeCase) ''DelPermInfo)
emptyRolePermInfo :: RolePermInfo
@ -283,6 +289,7 @@ data ViewInfo
, viIsInsertable :: !Bool
} deriving (Show, Eq, Generic)
instance NFData ViewInfo
instance Cacheable ViewInfo
$(deriveJSON (aesonDrop 2 snakeCase) ''ViewInfo)
isMutable :: (ViewInfo -> Bool) -> Maybe ViewInfo -> Bool
@ -302,6 +309,7 @@ data TableConfig
, _tcCustomColumnNames :: !CustomColumnNames
} deriving (Show, Eq, Lift, Generic)
instance NFData TableConfig
instance Cacheable TableConfig
$(deriveToJSON (aesonDrop 3 snakeCase) ''TableConfig)
emptyTableConfig :: TableConfig
@ -329,7 +337,8 @@ data TableCoreInfoG field primaryKeyColumn
, _tciViewInfo :: !(Maybe ViewInfo)
, _tciEnumValues :: !(Maybe EnumValues)
, _tciCustomConfig :: !TableConfig
} deriving (Show, Eq)
} deriving (Show, Eq, Generic)
instance (Cacheable a, Cacheable b) => Cacheable (TableCoreInfoG a b)
$(deriveToJSON (aesonDrop 4 snakeCase) ''TableCoreInfoG)
$(makeLenses ''TableCoreInfoG)
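
Parameterised types such as GBoolExp and PrimaryKey earlier, and TableCoreInfoG just above, follow the same recipe but thread a Cacheable constraint through each type parameter so the (assumed) generic default can recurse into the fields. A minimal sketch; the two-parameter container Pair is hypothetical.

{-# LANGUAGE DeriveGeneric #-}

import GHC.Generics (Generic)
import Hasura.Incremental (Cacheable)

-- Hypothetical container standing in for types like TableCoreInfoG: every
-- parameter that occurs in a field needs its own Cacheable constraint.
data Pair a b
  = Pair
  { pairFst :: !a
  , pairSnd :: !b
  } deriving (Show, Eq, Generic)

instance (Cacheable a, Cacheable b) => Cacheable (Pair a b)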

View File

@ -1,5 +1,6 @@
module Hasura.SQL.DML where
import Hasura.Incremental (Cacheable)
import Hasura.Prelude
import Hasura.SQL.Types
@ -33,6 +34,7 @@ data Select
, selOffset :: !(Maybe OffsetExp)
} deriving (Show, Eq, Generic, Data)
instance NFData Select
instance Cacheable Select
mkSelect :: Select
mkSelect = Select Nothing [] Nothing
@ -41,7 +43,7 @@ mkSelect = Select Nothing [] Nothing
newtype LimitExp
= LimitExp SQLExp
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
instance ToSQL LimitExp where
toSQL (LimitExp se) =
@ -49,7 +51,7 @@ instance ToSQL LimitExp where
newtype OffsetExp
= OffsetExp SQLExp
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
instance ToSQL OffsetExp where
toSQL (OffsetExp se) =
@ -57,7 +59,7 @@ instance ToSQL OffsetExp where
newtype OrderByExp
= OrderByExp [OrderByItem]
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
data OrderByItem
= OrderByItem
@ -66,6 +68,7 @@ data OrderByItem
, oNulls :: !(Maybe NullsOrder)
} deriving (Show, Eq, Generic, Data)
instance NFData OrderByItem
instance Cacheable OrderByItem
instance ToSQL OrderByItem where
toSQL (OrderByItem e ot no) =
@ -74,6 +77,7 @@ instance ToSQL OrderByItem where
data OrderType = OTAsc | OTDesc
deriving (Show, Eq, Lift, Generic, Data)
instance NFData OrderType
instance Cacheable OrderType
instance ToSQL OrderType where
toSQL OTAsc = "ASC"
@ -84,6 +88,7 @@ data NullsOrder
| NLast
deriving (Show, Eq, Lift, Generic, Data)
instance NFData NullsOrder
instance Cacheable NullsOrder
instance ToSQL NullsOrder where
toSQL NFirst = "NULLS FIRST"
@ -95,7 +100,7 @@ instance ToSQL OrderByExp where
newtype GroupByExp
= GroupByExp [SQLExp]
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
instance ToSQL GroupByExp where
toSQL (GroupByExp idens) =
@ -103,7 +108,7 @@ instance ToSQL GroupByExp where
newtype FromExp
= FromExp [FromItem]
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
instance ToSQL FromExp where
toSQL (FromExp items) =
@ -143,7 +148,7 @@ mkRowExp extrs = let
newtype HavingExp
= HavingExp BoolExp
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
instance ToSQL HavingExp where
toSQL (HavingExp be) =
@ -151,7 +156,7 @@ instance ToSQL HavingExp where
newtype WhereFrag
= WhereFrag { getWFBoolExp :: BoolExp }
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
instance ToSQL WhereFrag where
toSQL (WhereFrag be) =
@ -182,6 +187,7 @@ data Qual
| QualVar !T.Text
deriving (Show, Eq, Generic, Data)
instance NFData Qual
instance Cacheable Qual
mkQual :: QualifiedTable -> Qual
mkQual = QualTable
@ -198,6 +204,7 @@ data QIden
= QIden !Qual !Iden
deriving (Show, Eq, Generic, Data)
instance NFData QIden
instance Cacheable QIden
instance ToSQL QIden where
toSQL (QIden qual iden) =
@ -205,7 +212,7 @@ instance ToSQL QIden where
newtype SQLOp
= SQLOp {sqlOpTxt :: T.Text}
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
incOp :: SQLOp
incOp = SQLOp "+"
@ -227,7 +234,7 @@ jsonbDeleteAtPathOp = SQLOp "#-"
newtype TypeAnn
= TypeAnn { unTypeAnn :: T.Text }
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
mkTypeAnn :: PGType PGScalarType -> TypeAnn
mkTypeAnn = TypeAnn . toSQLTxt
@ -253,6 +260,7 @@ data CountType
| CTDistinct ![PGCol]
deriving (Show, Eq, Generic, Data)
instance NFData CountType
instance Cacheable CountType
instance ToSQL CountType where
toSQL CTStar = "*"
@ -263,7 +271,7 @@ instance ToSQL CountType where
newtype TupleExp
= TupleExp [SQLExp]
deriving (Show, Eq, NFData, Data)
deriving (Show, Eq, NFData, Data, Cacheable)
instance ToSQL TupleExp where
toSQL (TupleExp exps) =
@ -293,6 +301,7 @@ data SQLExp
| SEFunction !FunctionExp
deriving (Show, Eq, Generic, Data)
instance NFData SQLExp
instance Cacheable SQLExp
withTyAnn :: PGScalarType -> SQLExp -> SQLExp
withTyAnn colTy v = SETyAnn v . mkTypeAnn $ PGTypeScalar colTy
@ -302,7 +311,7 @@ instance J.ToJSON SQLExp where
newtype Alias
= Alias { getAlias :: Iden }
deriving (Show, Eq, NFData, Hashable, Data)
deriving (Show, Eq, NFData, Hashable, Data, Cacheable)
instance IsIden Alias where
toIden (Alias iden) = iden
@ -365,6 +374,7 @@ intToSQLExp =
data Extractor = Extractor !SQLExp !(Maybe Alias)
deriving (Show, Eq, Generic, Data)
instance NFData Extractor
instance Cacheable Extractor
mkSQLOpExp
:: SQLOp
@ -411,6 +421,7 @@ data DistinctExpr
| DistinctOn ![SQLExp]
deriving (Show, Eq, Generic, Data)
instance NFData DistinctExpr
instance Cacheable DistinctExpr
instance ToSQL DistinctExpr where
toSQL DistinctSimple = "DISTINCT"
@ -423,6 +434,7 @@ data FunctionArgs
, fasNamed :: !(HM.HashMap Text SQLExp)
} deriving (Show, Eq, Generic, Data)
instance NFData FunctionArgs
instance Cacheable FunctionArgs
instance ToSQL FunctionArgs where
toSQL (FunctionArgs positionalArgs namedArgsMap) =
@ -437,6 +449,7 @@ data FunctionExp
, feAlias :: !(Maybe Alias)
} deriving (Show, Eq, Generic, Data)
instance NFData FunctionExp
instance Cacheable FunctionExp
instance ToSQL FunctionExp where
toSQL (FunctionExp qf args alsM) =
@ -452,6 +465,7 @@ data FromItem
| FIJoin !JoinExpr
deriving (Show, Eq, Generic, Data)
instance NFData FromItem
instance Cacheable FromItem
mkSelFromItem :: Select -> Alias -> FromItem
mkSelFromItem = FISelect (Lateral False)
@ -481,7 +495,7 @@ instance ToSQL FromItem where
toSQL je
newtype Lateral = Lateral Bool
deriving (Show, Eq, Data, NFData)
deriving (Show, Eq, Data, NFData, Cacheable)
instance ToSQL Lateral where
toSQL (Lateral True) = "LATERAL"
@ -495,6 +509,7 @@ data JoinExpr
, tjeJC :: !JoinCond
} deriving (Show, Eq, Generic, Data)
instance NFData JoinExpr
instance Cacheable JoinExpr
instance ToSQL JoinExpr where
toSQL je =
@ -510,6 +525,7 @@ data JoinType
| FullOuter
deriving (Eq, Show, Generic, Data)
instance NFData JoinType
instance Cacheable JoinType
instance ToSQL JoinType where
toSQL Inner = "INNER JOIN"
@ -522,6 +538,7 @@ data JoinCond
| JoinUsing ![PGCol]
deriving (Show, Eq, Generic, Data)
instance NFData JoinCond
instance Cacheable JoinCond
instance ToSQL JoinCond where
toSQL (JoinOn be) =
@ -544,6 +561,7 @@ data BoolExp
| BEExp !SQLExp
deriving (Show, Eq, Generic, Data)
instance NFData BoolExp
instance Cacheable BoolExp
-- removes extraneous 'AND true's
simplifyBoolExp :: BoolExp -> BoolExp
@ -598,6 +616,7 @@ instance ToSQL BoolExp where
data BinOp = AndOp | OrOp
deriving (Show, Eq, Generic, Data)
instance NFData BinOp
instance Cacheable BinOp
instance ToSQL BinOp where
toSQL AndOp = "AND"
@ -625,6 +644,7 @@ data CompareOp
| SHasKeysAll
deriving (Eq, Generic, Data)
instance NFData CompareOp
instance Cacheable CompareOp
instance Show CompareOp where
show = \case
@ -768,7 +788,7 @@ instance ToSQL SQLConflict where
newtype ValuesExp
= ValuesExp [TupleExp]
deriving (Show, Eq, Data, NFData)
deriving (Show, Eq, Data, NFData, Cacheable)
instance ToSQL ValuesExp where
toSQL (ValuesExp tuples) =

View File

@ -82,6 +82,8 @@ import qualified Language.GraphQL.Draft.Syntax as G
import qualified PostgreSQL.Binary.Decoding as PD
import qualified Text.Builder as TB
import Hasura.Incremental (Cacheable)
class ToSQL a where
toSQL :: a -> TB.Builder
@ -100,7 +102,7 @@ infixr 6 <+>
newtype Iden
= Iden { getIdenTxt :: T.Text }
deriving (Show, Eq, NFData, FromJSON, ToJSON, Hashable, Semigroup, Data)
deriving (Show, Eq, NFData, FromJSON, ToJSON, Hashable, Semigroup, Data, Cacheable)
instance ToSQL Iden where
toSQL (Iden t) =
@ -160,7 +162,7 @@ class ToTxt a where
newtype TableName
= TableName { getTableTxt :: T.Text }
deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary, NFData)
deriving (Show, Eq, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary, NFData, Cacheable)
instance IsIden TableName where
toIden (TableName t) = Iden t
@ -204,7 +206,7 @@ isView _ = False
newtype ConstraintName
= ConstraintName { getConstraintTxt :: T.Text }
deriving (Show, Eq, DQuote, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift, NFData)
deriving (Show, Eq, DQuote, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift, NFData, Cacheable)
instance IsIden ConstraintName where
toIden (ConstraintName t) = Iden t
@ -214,7 +216,7 @@ instance ToSQL ConstraintName where
newtype FunctionName
= FunctionName { getFunctionTxt :: T.Text }
deriving (Show, Eq, Ord, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift, Data, Generic, Arbitrary, NFData)
deriving (Show, Eq, Ord, FromJSON, ToJSON, Q.ToPrepArg, Q.FromCol, Hashable, Lift, Data, Generic, Arbitrary, NFData, Cacheable)
instance IsIden FunctionName where
toIden (FunctionName t) = Iden t
@ -230,7 +232,7 @@ instance ToTxt FunctionName where
newtype SchemaName
= SchemaName { getSchemaTxt :: T.Text }
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary, NFData)
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, Lift, Data, Generic, Arbitrary, NFData, Cacheable)
publicSchema :: SchemaName
publicSchema = SchemaName "public"
@ -250,6 +252,7 @@ data QualifiedObject a
, qName :: !a
} deriving (Show, Eq, Functor, Ord, Generic, Lift, Data)
instance (NFData a) => NFData (QualifiedObject a)
instance (Cacheable a) => Cacheable (QualifiedObject a)
instance (FromJSON a) => FromJSON (QualifiedObject a) where
parseJSON v@(String _) =
@ -299,11 +302,11 @@ type QualifiedFunction = QualifiedObject FunctionName
newtype PGDescription
= PGDescription { getPGDescription :: T.Text }
deriving (Show, Eq, FromJSON, ToJSON, Q.FromCol, NFData)
deriving (Show, Eq, FromJSON, ToJSON, Q.FromCol, NFData, Cacheable)
newtype PGCol
= PGCol { getPGColTxt :: T.Text }
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, ToJSONKey, FromJSONKey, Lift, Data, Generic, Arbitrary, NFData)
deriving (Show, Eq, Ord, FromJSON, ToJSON, Hashable, Q.ToPrepArg, Q.FromCol, ToJSONKey, FromJSONKey, Lift, Data, Generic, Arbitrary, NFData, Cacheable)
instance IsIden PGCol where
toIden (PGCol t) = Iden t
@ -342,8 +345,8 @@ data PGScalarType
| PGUnknown !T.Text
deriving (Show, Eq, Lift, Generic, Data)
instance NFData PGScalarType
instance Hashable PGScalarType
instance Cacheable PGScalarType
instance ToSQL PGScalarType where
toSQL = \case
@ -524,6 +527,7 @@ data PGType a
| PGTypeArray !a
deriving (Show, Eq, Generic, Data, Functor)
instance (NFData a) => NFData (PGType a)
instance (Cacheable a) => Cacheable (PGType a)
$(deriveJSON defaultOptions{constructorTagModifier = drop 6} ''PGType)
instance (ToSQL a) => ToSQL (PGType a) where
@ -542,6 +546,7 @@ data PGTypeKind
| PGKindUnknown !T.Text
deriving (Show, Eq, Generic)
instance NFData PGTypeKind
instance Cacheable PGTypeKind
instance FromJSON PGTypeKind where
parseJSON = withText "postgresTypeKind" $
@ -571,6 +576,7 @@ data QualifiedPGType
, _qptType :: !PGTypeKind
} deriving (Show, Eq, Generic)
instance NFData QualifiedPGType
instance Cacheable QualifiedPGType
$(deriveJSON (aesonDrop 4 snakeCase) ''QualifiedPGType)
isBaseType :: QualifiedPGType -> Bool

View File

@ -8,6 +8,7 @@ import qualified Data.HashMap.Strict as M
import qualified Data.HashSet as S
import Control.Arrow.Extended
import Control.Monad.Unique
import Test.Hspec
import qualified Hasura.Incremental as Inc
@ -16,23 +17,23 @@ spec :: Spec
spec = do
describe "cache" $ do
it "skips re-running rules if the input didnt change" $ do
let add1 :: MonadState Integer m => m ()
let add1 :: (MonadState Integer m) => m ()
add1 = modify' (+1)
rule = proc (a, b) -> do
Inc.cache $ arrM (\_ -> add1) -< a
Inc.cache $ arrM (\_ -> add1 *> add1) -< b
let (result1, state1) = runState (Inc.build rule (False, False)) 0
(result1, state1) <- runStateT (Inc.build rule (False, False)) 0
state1 `shouldBe` 3
let (result2, state2) = runState (Inc.rebuild result1 (True, False)) 0
(result2, state2) <- runStateT (Inc.rebuild result1 (True, False)) 0
state2 `shouldBe` 1
let (_, state3) = runState (Inc.rebuild result2 (True, True)) 0
(_, state3) <- runStateT (Inc.rebuild result2 (True, True)) 0
state3 `shouldBe` 2
describe "keyed" $ do
it "preserves incrementalization when entries dont change" $ do
let rule :: MonadWriter (S.HashSet (String, Integer)) m
let rule :: (MonadWriter (S.HashSet (String, Integer)) m, MonadUnique m)
=> Inc.Rule m (M.HashMap String Integer) (M.HashMap String Integer)
rule = proc m ->
(| Inc.keyed (\k v -> do
@ -40,9 +41,9 @@ spec = do
returnA -< v * 2)
|) m
let (result1, log1) = runWriter . Inc.build rule $ M.fromList [("a", 1), ("b", 2)]
(result1, log1) <- runWriterT . Inc.build rule $ M.fromList [("a", 1), ("b", 2)]
Inc.result result1 `shouldBe` M.fromList [("a", 2), ("b", 4)]
log1 `shouldBe` S.fromList [("a", 1), ("b", 2)]
let (result2, log2) = runWriter . Inc.rebuild result1 $ M.fromList [("a", 1), ("b", 3), ("c", 4)]
(result2, log2) <- runWriterT . Inc.rebuild result1 $ M.fromList [("a", 1), ("b", 3), ("c", 4)]
Inc.result result2 `shouldBe` M.fromList [("a", 2), ("b", 6), ("c", 8)]
log2 `shouldBe` S.fromList [("b", 3), ("c", 4)]
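
The test changes above replace the pure runState/runWriter runners with their transformer counterparts and pull in Control.Monad.Unique, which suggests that build, rebuild, and cache now run in an arbitrary (suitably constrained) monad rather than a pure one. A minimal usage sketch along the same lines, inferred from the tests rather than from the library's actual signatures; countingRule and demo are illustrative names only.

{-# LANGUAGE Arrows #-}

import Control.Arrow
import Control.Arrow.Extended
import Control.Monad.State.Strict (StateT, modify', runStateT)

import qualified Hasura.Incremental as Inc

-- Counts how many times the effectful sub-rule actually runs.
countingRule :: Inc.Rule (StateT Int IO) Bool Bool
countingRule = proc input ->
  -- Inc.cache memoizes the wrapped rule: on rebuild, the effect is assumed to
  -- re-run only when `input` differs from the previous build.
  Inc.cache (arrM (\x -> modify' (+ 1) >> pure x)) -< input

demo :: IO ()
demo = do
  (res1, runs1) <- runStateT (Inc.build countingRule True) 0
  (_,    runs2) <- runStateT (Inc.rebuild res1 True) 0  -- unchanged input
  print (runs1, runs2)  -- expected (1, 0): the cached sub-rule is skipped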