BigQuery Feature Branch

This commit implements BigQuery support.

Co-authored-by: Antoine Leblanc <1618949+nicuveo@users.noreply.github.com>
Co-authored-by: Sibi Prabakaran <737477+psibi@users.noreply.github.com>
Co-authored-by: Aniket Deshpande <922486+aniketd@users.noreply.github.com>
Co-authored-by: Vamshi Surabhi <6562944+0x777@users.noreply.github.com>
GitOrigin-RevId: 1a6ffaf34233e13e8125a5c908eaa7e32d65007b
Chris Done 2021-04-12 11:18:29 +01:00 committed by hasura-bot
parent 0f7c273873
commit f7a202a363
33 changed files with 5609 additions and 2 deletions


@@ -36,6 +36,7 @@ In the future, we will probably offer a way to explicitly choose which behaviour
(Add entries here in the order of: server, console, cli, docs, others)
- server: add support for BigQuery datasets
- console: add custom_column_names to track_table request with replaced invalid characters (#992)
- console: add details button to the success notification to see inserted row


@@ -173,7 +173,8 @@ constraints: any.Cabal ==3.2.0.0,
any.http-client ==0.7.5,
http-client +network-uri,
any.http-client-tls ==0.3.5.3,
any.http-conduit ==2.3.7.3,
any.http-conduit ==2.3.7.4,
http-conduit +aeson,
any.http-date ==0.0.10,
any.http-types ==0.12.3,
any.http2 ==2.0.5,
@@ -193,6 +194,7 @@ constraints: any.Cabal ==3.2.0.0,
jose -demos,
any.js-chart ==2.9.4.1,
any.kan-extensions ==5.2.1,
any.keys ==3.12.3,
any.lens ==4.19.2,
lens -benchmark-uniplate -dump-splices +inlining -j -old-inline-pragmas -safe +test-doctests +test-hunit +test-properties +test-templates +trustworthy,
any.lens-aeson ==1.1,
@@ -242,6 +244,8 @@ constraints: any.Cabal ==3.2.0.0,
parsers +attoparsec +binary +parsec,
any.pem ==0.2.4,
any.placeholders ==0.1,
any.pointed ==5.0.2,
pointed +comonad +containers +kan-extensions +semigroupoids +semigroups +stm +tagged +transformers +unordered-containers,
any.postgresql-binary ==0.12.3.3,
any.postgresql-libpq ==0.9.4.3,
postgresql-libpq -use-pkg-config,
@@ -274,6 +278,7 @@ constraints: any.Cabal ==3.2.0.0,
retry -lib-werror,
any.rts ==1.0,
any.safe ==0.3.19,
any.safe-exceptions ==0.1.7.1,
any.scientific ==0.3.6.2,
scientific -bytestring-builder -integer-simple,
any.semialign ==1.1.0.1,
@@ -361,6 +366,8 @@ constraints: any.Cabal ==3.2.0.0,
vector-algorithms +bench +boundschecks -internalchecks -llvm +properties -unsafechecks,
any.vector-binary-instances ==0.2.5.1,
any.vector-builder ==0.3.8,
any.vector-instances ==3.4,
vector-instances +hashable,
any.vector-th-unbox ==0.2.1.7,
any.void ==0.7.3,
void -safe,


@@ -112,10 +112,12 @@ library
, deepseq
, dependent-map >=0.4 && <0.5
, exceptions
, safe-exceptions
, fast-logger
, hashable
, http-client-tls
, http-types
, http-conduit
, kan-extensions
, lifted-base
, monad-control
@@ -142,6 +144,7 @@ library
, unordered-containers >= 0.2.12
, validation
, vector
, vector-instances
, wai
-- Encoder related
@@ -260,6 +263,10 @@ library
, odbc
, resource-pool
-- bigquery support
, memory
, x509-store
if !flag(profiling)
build-depends:
-- 0.6.1 is supposedly not okay for ghc 8.6:
@@ -291,6 +298,25 @@ library
, Hasura.App
, Hasura.Metadata.Class
, Hasura.Backends.BigQuery.Connection
, Hasura.Backends.BigQuery.DataLoader.Execute
, Hasura.Backends.BigQuery.DataLoader.Plan
, Hasura.Backends.BigQuery.DDL
, Hasura.Backends.BigQuery.DDL.BoolExp
, Hasura.Backends.BigQuery.DDL.RunSQL
, Hasura.Backends.BigQuery.DDL.Source
, Hasura.Backends.BigQuery.FromIr
, Hasura.Backends.BigQuery.Instances.Execute
, Hasura.Backends.BigQuery.Instances.Schema
, Hasura.Backends.BigQuery.Instances.Transport
, Hasura.Backends.BigQuery.Instances.Types
, Hasura.Backends.BigQuery.Instances.Metadata
, Hasura.Backends.BigQuery.Meta
, Hasura.Backends.BigQuery.Plan
, Hasura.Backends.BigQuery.Source
, Hasura.Backends.BigQuery.ToQuery
, Hasura.Backends.BigQuery.Types
, Hasura.Backends.Postgres.Connection
, Hasura.Backends.Postgres.DDL
, Hasura.Backends.Postgres.DDL.BoolExp


@@ -0,0 +1,204 @@
{-# LANGUAGE NumericUnderscores #-}
module Hasura.Backends.BigQuery.Connection where
import Control.Concurrent.MVar
import Control.Exception
import Crypto.Hash.Algorithms (SHA256(..))
import Crypto.PubKey.RSA.PKCS15 (signSafer)
import Crypto.PubKey.RSA.Types as Cry (Error)
import Data.Bifunctor (bimap)
import qualified Data.ByteArray.Encoding as BAE
import qualified Data.ByteString as BS
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as B8
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.Environment as Env
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.Text.Encoding.Error as TE
import Data.Time.Clock.POSIX (getPOSIXTime)
import Data.Time.Clock
import Hasura.Prelude
import Hasura.Backends.BigQuery.Source
import qualified Hasura.Backends.MSSQL.Connection as MSSQLConn (getEnv)
import Hasura.RQL.Types.Error
import Network.HTTP.Simple
import Network.HTTP.Types
newtype Scope
= Scope { unScope :: T.Text }
deriving (Show, Eq, IsString)
data GoogleAccessTokenRequest = GoogleAccessTokenRequest
{ _gatrGrantType :: !Text
, _gatrAssertion :: !Text
} deriving (Show, Eq)
$(J.deriveJSON (J.aesonDrop 5 J.snakeCase){J.omitNothingFields=False} ''GoogleAccessTokenRequest)
mkTokenRequest :: Text -> GoogleAccessTokenRequest
mkTokenRequest = GoogleAccessTokenRequest "urn:ietf:params:oauth:grant-type:jwt-bearer"
data TokenProblem
= BearerTokenDecodeProblem TE.UnicodeException
| BearerTokenSignsaferProblem Cry.Error
| TokenFetchProblem JSONException
| TokenRequestNonOK Status
deriving (Show)
instance Exception TokenProblem
data ServiceAccountProblem
= ServiceAccountFileDecodeProblem String
deriving (Show)
instance Exception ServiceAccountProblem
resolveConfigurationJson ::
(QErrM m, J.FromJSON a) =>
Env.Environment ->
ConfigurationJSON a -> -- REVIEW: Can this be made polymorphic?
m (Either String a)
resolveConfigurationJson env = \case
FromYamlJSON s -> pure . Right $ s
FromEnvJSON v -> do
fileContents <- MSSQLConn.getEnv env v
case J.eitherDecode . BL.fromStrict . TE.encodeUtf8 $ fileContents of
Left e -> pure . Left $ e
Right sa -> pure . Right $ sa
resolveConfigurationInput ::
QErrM m =>
Env.Environment ->
ConfigurationInput ->
m Text
resolveConfigurationInput env = \case
FromYaml s -> pure s
FromEnv v -> MSSQLConn.getEnv env v
resolveConfigurationInputs ::
QErrM m =>
Env.Environment ->
ConfigurationInputs ->
m [Text]
resolveConfigurationInputs env = \case
FromYamls a -> pure a
FromEnvs v -> filter (not . T.null) . T.splitOn "," <$> MSSQLConn.getEnv env v
getAccessToken :: MonadIO m => ServiceAccount -> m (Either TokenProblem TokenResp)
getAccessToken sa = do
eJwt <- encodeBearerJWT sa ["https://www.googleapis.com/auth/cloud-platform"]
case eJwt of
Left tokenProblem -> pure . Left $ tokenProblem
Right jwt ->
case TE.decodeUtf8' jwt of
Left unicodeEx -> pure . Left . BearerTokenDecodeProblem $ unicodeEx
Right assertion -> do
tokenFetchResponse :: Response (Either JSONException TokenResp) <-
httpJSONEither $
setRequestBodyJSON (mkTokenRequest assertion) $
parseRequest_ ("POST " <> tokenURL)
if getResponseStatusCode tokenFetchResponse /= 200
then
pure . Left . TokenRequestNonOK . getResponseStatus $ tokenFetchResponse
else
case getResponseBody tokenFetchResponse of
Left jsonEx -> pure . Left . TokenFetchProblem $ jsonEx
Right tr@TokenResp{_trExpiresAt} -> do
-- We add the current POSIXTime and store the POSIX "moment" at
-- which this token will expire, so that at the site where
-- we need to check if a token is nearing expiry, we only
-- need to compare it with the _then_ "current" POSIXTime.
expiresAt <- (fromIntegral _trExpiresAt +) <$> liftIO getPOSIXTime
pure . Right $ tr { _trExpiresAt = truncate expiresAt }
where
-- TODO: use jose for jwt encoding
b64EncodeJ :: (J.ToJSON a) => a -> BS.ByteString
b64EncodeJ = base64 . BL.toStrict . J.encode
base64 :: BS.ByteString -> BS.ByteString
base64 = BAE.convertToBase BAE.Base64URLUnpadded
tokenURL :: String
tokenURL = "https://www.googleapis.com/oauth2/v4/token"
maxTokenLifetime :: Int
maxTokenLifetime = 3600
truncateEquals :: B8.ByteString -> B8.ByteString
truncateEquals bs =
case B8.unsnoc bs of
Nothing -> mempty
Just (bs', x)
| x == '=' -> bs'
| otherwise -> bs
encodeBearerJWT :: ( MonadIO m ) => ServiceAccount -> [Scope] -> m (Either TokenProblem BS.ByteString)
encodeBearerJWT ServiceAccount{..} scopes = do
inp <- mkSigInput . truncate <$> liftIO getPOSIXTime
signRes <- liftIO $ signSafer (Just SHA256) (unPKey _saPrivateKey) inp
case signRes of
Left e -> pure . Left . BearerTokenSignsaferProblem $ e
Right sig -> pure . Right $ inp <> "." <> truncateEquals (base64 sig)
where
mkSigInput :: Int -> BS.ByteString
mkSigInput n = header <> "." <> payload
where
header = b64EncodeJ $ J.object
[ "alg" J..= ("RS256" :: T.Text)
, "typ" J..= ("JWT" :: T.Text)
]
payload = b64EncodeJ $ J.object
  [ "aud" J..= tokenURL
  , "scope" J..= T.intercalate " " (map unScope scopes)
  , "iat" J..= n
  , "exp" J..= (n + maxTokenLifetime)
  , "iss" J..= _saClientEmail
  ]
getServiceAccount :: MonadIO m => FilePath -> m (Either ServiceAccountProblem ServiceAccount)
getServiceAccount serviceAccountFilePath =
bimap ServiceAccountFileDecodeProblem id . J.eitherDecode' <$> liftIO (BL.readFile serviceAccountFilePath)
-- | Get a usable token. If the token has expired refresh it.
getUsableToken :: MonadIO m => BigQuerySourceConfig -> m (Either TokenProblem TokenResp)
getUsableToken BigQuerySourceConfig{_scServiceAccount, _scAccessTokenMVar} =
liftIO $ modifyMVar _scAccessTokenMVar $ \mTokenResp -> do
case mTokenResp of
Nothing -> do
refreshedToken <- getAccessToken _scServiceAccount
case refreshedToken of
Left e -> pure (Nothing, Left e)
Right t -> pure (Just t, Right t)
Just t@TokenResp{_trAccessToken, _trExpiresAt} -> do
pt <- liftIO $ getPOSIXTime
if (pt >= fromIntegral _trExpiresAt - (10 :: NominalDiffTime)) -- when posix-time is greater than expires-at-minus-threshold
then do
refreshedToken' <- getAccessToken _scServiceAccount
case refreshedToken' of
Left e -> pure (Just t, Left e)
Right t' -> pure (Just t', Right t')
else pure (Just t, Right t)
data BigQueryProblem
= TokenProblem TokenProblem
deriving (Show)
runBigQuery ::
(MonadIO m) =>
BigQuerySourceConfig ->
Request ->
m (Either BigQueryProblem (Response BL.ByteString))
runBigQuery sc req = do
eToken <- getUsableToken sc
case eToken of
Left e -> pure . Left . TokenProblem $ e
Right TokenResp{_trAccessToken, _trExpiresAt} -> do
let req' = setRequestHeader "Authorization" ["Bearer " <> (TE.encodeUtf8 . coerce) _trAccessToken] req
-- TODO: Make this catch the HTTP exceptions
Right <$> httpLBS req'

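For orientation, here is a minimal, hypothetical smoke test for this module (not part of the commit; demoToken is an illustrative name): it decodes a service-account key file and attempts a single token fetch.

demoToken :: FilePath -> IO ()
demoToken saPath = do
  -- Decode the service-account JSON key file.
  eSA <- getServiceAccount saPath
  case eSA of
    Left problem -> print problem
    Right sa -> do
      -- Sign a JWT with the account's private key and exchange it
      -- at Google's OAuth token endpoint for a bearer token.
      eTok <- getAccessToken sa
      case eTok of
        Left problem -> print problem
        Right _resp  -> putStrLn "token acquired"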

@@ -0,0 +1,141 @@
module Hasura.Backends.BigQuery.DDL
( buildComputedFieldInfo
, buildRemoteFieldInfo
, fetchAndValidateEnumValues
, createTableEventTrigger
, buildEventTriggerInfo
, buildFunctionInfo
, updateColumnInEventTrigger
, parseBoolExpOperations
, parseCollectableType
, module M
)
where
import Hasura.Backends.BigQuery.DDL.BoolExp
import Hasura.Prelude
import Data.Aeson
import qualified Data.Environment as Env
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.ComputedField
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.EventTrigger
import Hasura.RQL.Types.Function
import Hasura.RQL.Types.RemoteRelationship
import Hasura.RQL.Types.SchemaCache
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
import Hasura.SQL.Types
import Hasura.Server.Types
import Hasura.Backends.BigQuery.Instances.Types ()
import Hasura.Server.Utils
import Hasura.Session
import qualified Hasura.Backends.BigQuery.Types as BigQuery
import Hasura.Backends.BigQuery.DDL.Source as M
buildComputedFieldInfo
:: (MonadError QErr m)
=> HashSet (TableName 'BigQuery)
-> TableName 'BigQuery
-> ComputedFieldName
-> ComputedFieldDefinition 'BigQuery
-> RawFunctionInfo
-> Maybe Text
-> m (ComputedFieldInfo 'BigQuery)
buildComputedFieldInfo _ _ _ _ _ _ =
throw400 NotSupported "Computed fields aren't supported for BigQuery sources"
buildRemoteFieldInfo
:: (MonadError QErr m)
=> RemoteRelationship 'BigQuery
-> [ColumnInfo 'BigQuery]
-> RemoteSchemaMap
-> m (RemoteFieldInfo 'BigQuery, [SchemaDependency])
buildRemoteFieldInfo _ _ _ =
throw400 NotSupported "Remote joins aren't supported for BigQuery sources"
fetchAndValidateEnumValues
:: (Monad m)
=> SourceConfig 'BigQuery
-> TableName 'BigQuery
-> Maybe (PrimaryKey 'BigQuery (RawColumnInfo 'BigQuery))
-> [RawColumnInfo 'BigQuery]
-> m (Either QErr EnumValues)
fetchAndValidateEnumValues _ _ _ _ = runExceptT $
throw400 NotSupported "Enum tables are not supported for BigQuery sources"
createTableEventTrigger
:: (Monad m)
=> ServerConfigCtx
-> SourceConfig 'BigQuery
-> TableName 'BigQuery
-> [ColumnInfo 'BigQuery]
-> TriggerName
-> TriggerOpsDef
-> m (Either QErr ())
createTableEventTrigger _ _ _ _ _ _ = runExceptT $
throw400 NotSupported "Cannot create table event triggers in BigQuery sources"
buildEventTriggerInfo
:: MonadError QErr m
=> Env.Environment
-> SourceName
-> TableName 'BigQuery
-> EventTriggerConf
-> m (EventTriggerInfo, [SchemaDependency])
buildEventTriggerInfo _ _ _ _ =
throw400 NotSupported "Table event triggers are not supported for BigQuery sources"
buildFunctionInfo
:: (MonadError QErr m)
=> SourceName
-> FunctionName 'BigQuery
-> SystemDefined
-> FunctionConfig
-> [FunctionPermissionMetadata]
-> RawFunctionInfo
-> m (FunctionInfo 'BigQuery, SchemaDependency)
buildFunctionInfo _ _ _ _ _ _ =
throw400 NotSupported "SQL functions are not supported for BigQuery sources"
updateColumnInEventTrigger
:: TableName 'BigQuery
-> Column 'BigQuery
-> Column 'BigQuery
-> TableName 'BigQuery
-> EventTriggerConf
-> EventTriggerConf
updateColumnInEventTrigger _ _ _ _ = id
parseCollectableType
:: (MonadError QErr m)
=> CollectableType (ColumnType 'BigQuery)
-> Value
-> m (PartialSQLExp 'BigQuery)
parseCollectableType collectableType = \case
String t
| isSessionVariable t -> pure $ mkTypedSessionVar collectableType $ mkSessionVariable t
| isReqUserId t -> pure $ mkTypedSessionVar collectableType userIdHeader
val -> case collectableType of
CollectableTypeScalar scalarType ->
PSESQLExp . BigQuery.ValueExpression <$> parseScalarValueColumnType scalarType val
CollectableTypeArray _ ->
throw400 NotSupported "Array types are not supported in BigQuery backend"
mkTypedSessionVar
:: CollectableType (ColumnType 'BigQuery)
-> SessionVariable -> PartialSQLExp 'BigQuery
mkTypedSessionVar columnType =
PSESessVar (msColumnTypeToScalarType <$> columnType)
msColumnTypeToScalarType :: ColumnType 'BigQuery -> ScalarType 'BigQuery
msColumnTypeToScalarType = \case
ColumnScalar scalarType -> scalarType
ColumnEnumReference _ -> BigQuery.StringScalarType


@@ -0,0 +1,69 @@
module Hasura.Backends.BigQuery.DDL.BoolExp where
import Hasura.Prelude
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import Hasura.Backends.BigQuery.Instances.Types ()
import Hasura.Backends.BigQuery.Types
import Hasura.RQL.IR.BoolExp
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.SchemaCache
import Hasura.SQL.Backend
import Hasura.SQL.Types
parseBoolExpOperations
:: forall m v
. (MonadError QErr m)
=> ValueParser 'BigQuery m v
-> FieldInfoMap (FieldInfo 'BigQuery)
-> ColumnInfo 'BigQuery
-> J.Value
-> m [OpExpG 'BigQuery v]
parseBoolExpOperations rhsParser _fields columnInfo value =
withPathK (columnName $ pgiColumn columnInfo) $
parseOperations (pgiType columnInfo) value
where
parseWithTy ty = rhsParser (CollectableTypeScalar ty)
parseOperations :: ColumnType 'BigQuery -> J.Value -> m [OpExpG 'BigQuery v]
parseOperations columnType = \case
J.Object o -> mapM (parseOperation columnType) $ Map.toList o
v -> pure . AEQ False <$> parseWithTy columnType v
parseOperation :: ColumnType 'BigQuery -> (Text, J.Value) -> m (OpExpG 'BigQuery v)
parseOperation columnType (opStr, val) = withPathK opStr $
case opStr of
"_eq" -> parseEq
"$eq" -> parseEq
"_neq" -> parseNeq
"$neq" -> parseNeq
"_gt" -> parseGt
"$gt" -> parseGt
"_lt" -> parseLt
"$lt" -> parseLt
"_gte" -> parseGte
"$gte" -> parseGte
"_lte" -> parseLte
"$lte" -> parseLte
-- TODO: support column operators
x -> throw400 UnexpectedPayload $ "Unknown operator: " <> x
where
parseOne = parseWithTy columnType val
parseEq = AEQ False <$> parseOne
parseNeq = ANE False <$> parseOne
parseGt = AGT <$> parseOne
parseLt = ALT <$> parseOne
parseGte = AGTE <$> parseOne
parseLte = ALTE <$> parseOne

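To make the operator mapping above concrete, consider a hypothetical boolean-expression fragment for a single column:

  {"_gte": 18, "_lte": 65}

parseOperations dispatches each key through parseOperation, yielding [AGTE v, ALTE v'] where v and v' are the right-hand sides produced by the supplied ValueParser; a bare scalar such as 18 falls into the non-object branch and is treated as implicit equality, [AEQ False v].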

@@ -0,0 +1,94 @@
-- Working example:
--
-- $ echo '{"type":"bigquery_run_sql","args":{"sql":"select 3 * 4 as foo, \"Hello, World!\" as bar", "source":"chinook"}}' | curl -XPOST -d @- http://localhost:8080/v2/query
-- {"result_type":"TuplesOk","result":[["foo","bar"],["12","Hello, World!"]]}
module Hasura.Backends.BigQuery.DDL.RunSQL
( runSQL
, runDatabaseInspection
, BigQueryRunSQL
)
where
import Control.Monad.IO.Class
import qualified Data.Aeson as J
import Data.Aeson.TH (deriveJSON)
import Data.Aeson.Text (encodeToLazyText)
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Vector as V
import Hasura.Backends.BigQuery.DataLoader.Execute as Execute (BigQuery (..),
OutputValue (..),
RecordSet (..),
streamBigQuery)
import qualified Hasura.Backends.BigQuery.DataLoader.Plan as Plan
import Hasura.Backends.BigQuery.Source (BigQuerySourceConfig (..))
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Schema (RunSQLRes (..))
import Hasura.RQL.Types (CacheRWM, Code (..), MetadataM, QErr,
SourceName, askSourceConfig, throw400)
data BigQueryRunSQL
= BigQueryRunSQL
{ _mrsSql :: Text
, _mrsSource :: !SourceName
} deriving (Show, Eq)
$(deriveJSON hasuraJSON ''BigQueryRunSQL)
runSQL ::
(MonadIO m, CacheRWM m, MonadError QErr m, MetadataM m) =>
BigQueryRunSQL ->
m EncJSON
runSQL = runSQL_ recordSetAsHeaderAndRows
-- | The SQL query in the request is ignored
runDatabaseInspection ::
(MonadIO m, CacheRWM m, MonadError QErr m, MetadataM m) =>
BigQueryRunSQL ->
m EncJSON
runDatabaseInspection (BigQueryRunSQL _query source) = do
BigQuerySourceConfig{_scDatasets = dataSets} <- askSourceConfig source
let queries = ["SELECT *, ARRAY(SELECT as STRUCT * from " <>
dataSet <> ".INFORMATION_SCHEMA.COLUMNS WHERE table_name = t.table_name) as columns from " <>
dataSet <> ".INFORMATION_SCHEMA.TABLES as t" | dataSet <- dataSets]
query' = T.intercalate " UNION ALL " queries
runSQL_ recordSetAsSchema (BigQueryRunSQL query' source)
runSQL_ ::
(MonadIO m, CacheRWM m, MonadError QErr m, MetadataM m) =>
(RecordSet -> J.Value) ->
BigQueryRunSQL ->
m EncJSON
runSQL_ f (BigQueryRunSQL query source) = do
sourceConfig <- askSourceConfig source
result <-
streamBigQuery
sourceConfig
Execute.BigQuery {query = LT.fromStrict query, parameters = mempty}
case result of
Left queryError -> throw400 BigQueryError (T.pack (show queryError)) -- TODO: Pretty print the error type.
Right recordSet ->
pure
(encJFromJValue
(RunSQLRes "TuplesOk" (f recordSet)))
recordSetAsHeaderAndRows :: RecordSet -> J.Value
recordSetAsHeaderAndRows RecordSet {rows} = J.toJSON (thead : tbody)
where
thead =
case rows V.!? 0 of
Nothing -> []
Just row ->
map (J.toJSON . (coerce :: Plan.FieldName -> Text)) (OMap.keys row)
tbody :: [[J.Value]]
tbody = map (\row -> map J.toJSON (OMap.elems row)) (toList rows)
recordSetAsSchema :: RecordSet -> J.Value
recordSetAsSchema rs@(RecordSet {rows}) =
recordSetAsHeaderAndRows $
rs { rows = OMap.adjust
(TextOutputValue . LT.toStrict . encodeToLazyText . J.toJSON)
(Plan.FieldName "columns") <$> rows }

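To illustrate runDatabaseInspection: for a hypothetical source with _scDatasets = ["dataset1", "dataset2"], the assembled query' is the following (line breaks added here for readability):

  SELECT *, ARRAY(SELECT as STRUCT * from dataset1.INFORMATION_SCHEMA.COLUMNS
    WHERE table_name = t.table_name) as columns
  from dataset1.INFORMATION_SCHEMA.TABLES as t
  UNION ALL
  SELECT *, ARRAY(SELECT as STRUCT * from dataset2.INFORMATION_SCHEMA.COLUMNS
    WHERE table_name = t.table_name) as columns
  from dataset2.INFORMATION_SCHEMA.TABLES as t

Its result is then rendered through recordSetAsSchema rather than the plain header-and-rows encoding.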

@@ -0,0 +1,131 @@
{-# LANGUAGE DisambiguateRecordFields #-}
{-# LANGUAGE DuplicateRecordFields #-}
module Hasura.Backends.BigQuery.DDL.Source
( resolveSource
, postDropSourceHook
, resolveSourceConfig
)
where
import Control.Concurrent.MVar (newMVar)
import qualified Data.Environment as Env
import qualified Data.HashMap.Strict as HM
import qualified Data.Text as T
import Data.Time.Clock.System
import Hasura.Backends.BigQuery.Connection
import Hasura.Backends.BigQuery.Instances.Types ()
import Hasura.Backends.BigQuery.Meta
import Hasura.Backends.BigQuery.Source
import Hasura.Backends.BigQuery.Types
import Hasura.Prelude
import Hasura.RQL.Types.Column
import Hasura.RQL.Types.Common
import Hasura.RQL.Types.Error
import Hasura.RQL.Types.Source
import Hasura.RQL.Types.Table
import Hasura.SQL.Backend
resolveSourceConfig ::
MonadIO m =>
SourceName ->
BigQueryConnSourceConfig ->
m (Either QErr BigQuerySourceConfig)
resolveSourceConfig _name BigQueryConnSourceConfig{..} = runExceptT $ do
env <- liftIO Env.getEnvironment
eSA <- resolveConfigurationJson env _cscServiceAccount
case eSA of
Left e -> throw400 Unexpected $ T.pack e
Right _scServiceAccount -> do
_scDatasets <- resolveConfigurationInputs env _cscDatasets
_scProjectId <- resolveConfigurationInput env _cscProjectId
trMVar <- liftIO $ newMVar Nothing -- `runBigQuery` initializes the token
pure BigQuerySourceConfig
{ _scAccessTokenMVar = trMVar
, ..
}
resolveSource
:: (MonadIO m)
=> BigQuerySourceConfig
-> m (Either QErr (ResolvedSource 'BigQuery))
resolveSource sourceConfig =
runExceptT $ do
result <- getTables sourceConfig
case result of
Left err ->
throw400 Unexpected $
"unexpected exception while connecting to database: " <> tshow err
Right restTables -> do
seconds <- liftIO $ fmap systemSeconds getSystemTime
pure
(ResolvedSource
{ _rsConfig = sourceConfig
, _rsTables =
HM.fromList
[ ( restTableReferenceToTableName tableReference
, DBTableMetadata
{ _ptmiOid = OID (fromIntegral seconds + index :: Int) -- TODO: The seconds are used for uniqueness. BigQuery doesn't support a "stable" ID for a table.
, _ptmiColumns =
[ RawColumnInfo
{ prciName = ColumnName name
, prciPosition = position
, prciType = restTypeToScalarType type'
, prciIsNullable =
case mode of
Nullable -> True
_ -> False
, prciDescription = Nothing
}
| (position, RestFieldSchema {name, type', mode}) <-
zip [1 ..] fields -- TODO: Same trouble as Oid above.
]
, _ptmiPrimaryKey = Nothing
, _ptmiUniqueConstraints = mempty
, _ptmiForeignKeys = mempty
, _ptmiViewInfo = Just $ ViewInfo False False False
, _ptmiDescription = Nothing
})
| (index, RestTable {tableReference, schema}) <-
zip [0 ..] restTables
, let RestTableSchema fields = schema
]
, _rsFunctions = mempty
, _rsPgScalars = mempty
})
restTypeToScalarType :: RestType -> ScalarType
restTypeToScalarType =
\case
STRING -> StringScalarType
BYTES -> BytesScalarType
INTEGER -> IntegerScalarType
FLOAT -> FloatScalarType
BOOL -> BoolScalarType
TIMESTAMP -> TimestampScalarType
DATE -> DateScalarType
TIME -> TimeScalarType
DATETIME -> DatetimeScalarType
GEOGRAPHY -> GeographyScalarType
STRUCT -> StructScalarType
BIGDECIMAL -> BigDecimalScalarType
DECIMAL -> DecimalScalarType
-- Hierarchy: Project / Dataset / Table
-- see <https://cloud.google.com/bigquery/docs/datasets-intro>
restTableReferenceToTableName :: RestTableReference -> TableName
restTableReferenceToTableName RestTableReference {..} =
TableName {tableName = tableId, tableNameSchema = datasetId}
-- We ignore project id and push that requirement up to the top to
-- the data source level.
postDropSourceHook
:: (MonadIO m)
=> BigQuerySourceConfig -> m ()
postDropSourceHook _ =
-- On BigQuery we don't keep connections open.
pure ()

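As a concrete, hypothetical example of the naming convention above: a table myproject.mydataset.mytable comes back from the REST API as a RestTableReference whose datasetId is "mydataset" and tableId is "mytable", and restTableReferenceToTableName maps it to TableName {tableName = "mytable", tableNameSchema = "mydataset"}; the project id is not part of the table name and lives in the source configuration (_scProjectId) instead.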
File diff suppressed because it is too large.


@@ -0,0 +1,427 @@
{-# LANGUAGE DuplicateRecordFields #-}
-- | Make a plan for the data loader to execute (.Execute).
module Hasura.Backends.BigQuery.DataLoader.Plan
( prettyPlanned
, runPlan
, planSelectHeadAndTail
, actionsForest
, drawActionsForest
, drawActionsForestSQL
, Ref
, PlannedAction(..)
, Action(..)
, Select(..)
, Join(..)
, Relationship(..)
, FieldName(..)
, HeadAndTail(..)
, selectQuery
, printDrawPlannedActions
) where
import Hasura.Prelude hiding (head, second, tail, tell)
import qualified Data.ByteString.Char8 as S8
import qualified Data.ByteString.Lazy.Char8 as L8
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Data.List as List
import qualified Data.Text as T
import qualified Data.Text.Encoding as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Builder as LT
import qualified Data.Text.Lazy.Encoding as LT
import Data.Aeson
import Data.Bifunctor
import Data.Graph
import Data.Sequence (Seq (..))
import Data.String
import Data.Tree
import qualified Hasura.Backends.BigQuery.ToQuery as ToQuery
import qualified Hasura.Backends.BigQuery.Types as BigQuery
--------------------------------------------------------------------------------
-- Types
data Ref = Ref
{ idx :: !Int
, text :: !Text
} deriving (Show, Eq, Generic, Ord)
instance Hashable Ref
data PlanState = PlanState
{ actions :: !(Seq PlannedAction)
, counter :: !Int
}
data PlannedAction = PlannedAction
{ ref :: Ref
, action :: Action
} deriving (Show)
newtype Plan a = Plan
{ unPlan :: State PlanState a
} deriving (Functor, Applicative, Monad, MonadState PlanState)
data Action
= SelectAction Select
| JoinAction Join
deriving (Show)
data Select = Select
{ selectTop :: !BigQuery.Top
, selectProjections :: !(NonEmpty BigQuery.Projection)
, selectFrom :: !BigQuery.From
, selectGroupBy :: ![BigQuery.FieldName]
, selectWhere :: !BigQuery.Where
, selectOrderBy :: !(Maybe (NonEmpty BigQuery.OrderBy))
, selectOffset :: !(Maybe BigQuery.Expression)
, selectRelationship :: !(Maybe Relationship)
, selectSqlJoins :: ![BigQuery.Join]
, selectHaskellJoins :: ![BigQuery.Join]
, selectAggUnwrap :: !(Maybe Text)
, wantedFields :: !(Maybe [Text])
} deriving (Show)
data Relationship = Relationship
{ leftRecordSet :: Ref
, onFields :: [(FieldName, FieldName)]
, rightTable :: BigQuery.EntityAlias
} deriving (Show)
newtype FieldName =
FieldName Text
deriving (Show, Ord, Eq, Hashable, FromJSON, ToJSONKey, IsString)
data Join = Join
{ joinOn :: [(FieldName,FieldName)]
, leftRecordSet :: Ref
, rightRecordSet :: Ref
, joinProvenance :: BigQuery.JoinProvenance
, joinFieldName :: !Text
, joinExtractPath :: !(Maybe Text)
, wantedFields :: !(Maybe [Text])
} deriving (Show)
data HeadAndTail = HeadAndTail
{ head :: Ref
, tail :: Ref
}
--------------------------------------------------------------------------------
-- Run planner
runPlan :: Plan r -> (r, [PlannedAction])
runPlan =
second (toList . actions) .
flip runState (PlanState {actions = mempty, counter = 0}) . unPlan
--------------------------------------------------------------------------------
-- Planners
planSelectHeadAndTail :: Maybe Relationship -> Maybe Text -> BigQuery.Select -> Plan HeadAndTail
planSelectHeadAndTail relationship joinExtractPath select0 = do
ref <- generate (selectFromName (BigQuery.selectFrom select0))
let select = fromSelect relationship joinExtractPath select0
action = SelectAction select
tell PlannedAction {ref, action}
joinsFinalRef <- foldM planJoin ref (selectHaskellJoins select)
pure
(let head = ref
tail = case selectHaskellJoins select of
[] -> ref
_ -> joinsFinalRef
in HeadAndTail {head,tail})
planJoin :: Ref -> BigQuery.Join -> Plan Ref
planJoin leftRecordSet BigQuery.Join {..} = do
ref <- generate (joinAliasName joinAlias)
let joinFields = fmap (bimap toFieldName toFieldName) joinOn
rightRecordSet <-
case joinSource of
BigQuery.JoinSelect select ->
fmap
(\HeadAndTail {..} -> tail)
(planSelectHeadAndTail
(Just
(Relationship
{ leftRecordSet
, onFields = joinFields
, rightTable = joinRightTable
}))
joinExtractPath
select)
let action =
JoinAction
Join
{ leftRecordSet
, rightRecordSet
, joinOn = joinFields
, wantedFields
, ..
}
tell PlannedAction {ref, action}
pure ref
where
BigQuery.JoinSelect BigQuery.Select {selectFinalWantedFields = wantedFields} =
joinSource
--------------------------------------------------------------------------------
-- Conversions
-- TODO: Check this. We're intentionally discarding the table
-- qualification.
toFieldName :: BigQuery.FieldName -> FieldName
toFieldName (BigQuery.FieldName {fieldName = t}) = FieldName t
joinAliasName :: BigQuery.EntityAlias -> Text
joinAliasName (BigQuery.EntityAlias {entityAliasText}) = entityAliasText
selectFromName :: BigQuery.From -> Text
selectFromName (BigQuery.FromQualifiedTable (BigQuery.Aliased {aliasedThing = BigQuery.TableName {tableName}})) =
tableName
fromSelect :: Maybe Relationship -> Maybe Text -> BigQuery.Select -> Select
fromSelect selectRelationship selectAggUnwrap BigQuery.Select {..} =
Select
{ selectSqlJoins =
mapMaybe
(\case
j@BigQuery.Join {joinProvenance = BigQuery.OrderByJoinProvenance} ->
pure j
j@BigQuery.Join {joinProvenance = BigQuery.ArrayAggregateJoinProvenance} ->
pure j
_ -> Nothing)
selectJoins
, selectHaskellJoins =
mapMaybe
(\case
BigQuery.Join {joinProvenance = BigQuery.OrderByJoinProvenance} ->
Nothing
BigQuery.Join {joinProvenance = BigQuery.ArrayAggregateJoinProvenance} ->
Nothing
j -> pure j)
selectJoins
, wantedFields = selectFinalWantedFields
, ..
}
tell :: PlannedAction -> Plan ()
tell action = modify' (\s -> s {actions = actions s :|> action})
generate :: Text -> Plan Ref
generate text = do
idx <- gets counter
modify' (\s -> s {counter = counter s + 1})
pure (Ref {idx, text})
--------------------------------------------------------------------------------
-- Plan pretty printer
prettyPlanned :: [PlannedAction] -> IO ()
prettyPlanned =
L8.putStrLn .
LT.encodeUtf8 . LT.toLazyText . mconcat . List.intersperse "\n\n" . map prettyPlannedAction
prettyPlannedActionsSQL :: PlannedAction -> Maybe LT.Builder
prettyPlannedActionsSQL PlannedAction {action} =
case action of
JoinAction {} -> Nothing
SelectAction select -> pure query
where (query, _params) =
ToQuery.renderBuilderPretty
(ToQuery.fromSelect (selectQuery select))
prettyPlannedAction :: PlannedAction -> LT.Builder
prettyPlannedAction PlannedAction {ref, action} =
case action of
SelectAction select ->
mconcat
(List.intersperse
"\n"
(mconcat
(filter
(/= mempty)
[ [ "Load " <> prettyFrom (selectFrom select) <> " producing " <>
prettyRef ref
]
, [ "Fields: " <>
mconcat
(List.intersperse
", "
(map
prettyProjection
(toList (selectProjections select))))
]
, case selectRelationship select of
Nothing -> []
Just relationship -> [prettyRelationship relationship]
, (map prettyJoin (selectSqlJoins select))
, case selectTop select of
BigQuery.NoTop -> []
BigQuery.Top top ->
["Limit " <> LT.fromText (tshow top)]
, ["SQL:"]
, [query]
, ["Params: " | not (null params)]
, map
(\(idx :: Int, value) ->
fromString (show idx) <> "=" <> fromString (show value))
(OMap.toList params)
])))
where (query, params) =
ToQuery.renderBuilderPretty
(ToQuery.fromSelect (selectQuery select))
JoinAction Join {leftRecordSet, rightRecordSet, joinOn} ->
mconcat
(List.intersperse
"\n"
[ "Join " <> prettyRef leftRecordSet <> " with " <>
prettyRef rightRecordSet <>
" producing " <>
prettyRef ref
, "On " <> prettyJoinFields joinOn
])
prettyRef :: Ref -> LT.Builder
prettyRef Ref {..} = "#" <> LT.fromText (text <> tshow idx)
prettyFrom :: BigQuery.From -> LT.Builder
prettyFrom =
\case
BigQuery.FromQualifiedTable aliased ->
prettyAliased
(fmap
(\BigQuery.TableName {tableName = t} -> (LT.fromText t))
aliased)
prettyJoin :: BigQuery.Join -> LT.Builder
prettyJoin BigQuery.Join {..} =
"SQL join with " <> src <> " on " <> prettyJoinFields' joinOn <> " for " <>
reason
where
reason =
case joinProvenance of
BigQuery.OrderByJoinProvenance -> "order by"
BigQuery.ObjectJoinProvenance -> "object relation"
BigQuery.ArrayAggregateJoinProvenance -> "array aggregate relation"
BigQuery.ArrayJoinProvenance -> "array relation"
BigQuery.MultiplexProvenance -> "multiplex"
src =
case joinSource of
BigQuery.JoinSelect select -> prettyFrom (BigQuery.selectFrom select)
prettyJoinFields' :: [(BigQuery.FieldName, BigQuery.FieldName)] -> LT.Builder
prettyJoinFields' onFields =
mconcat
(List.intersperse
", "
(map
(\(left, right) ->
"(" <> prettyFieldName' left <> " = " <> prettyFieldName' right <>
")")
onFields))
prettyRelationship :: Relationship -> LT.Builder
prettyRelationship Relationship {leftRecordSet, onFields} =
"Relationship: " <> prettyRef leftRecordSet <> " on " <>
prettyJoinFields onFields
prettyJoinFields :: [(FieldName, FieldName)] -> LT.Builder
prettyJoinFields onFields =
mconcat
(List.intersperse
", "
(map
(\(left, right) ->
"(" <> prettyFieldName left <> " = " <> prettyFieldName right <>
")")
onFields))
prettyFieldName :: FieldName -> LT.Builder
prettyFieldName (FieldName t) = LT.fromText t
prettyProjection :: BigQuery.Projection -> LT.Builder
prettyProjection =
\case
BigQuery.ExpressionProjection aliased ->
prettyAliased (fmap (\_e -> "<Expression" <> ">") aliased)
BigQuery.FieldNameProjection aliased ->
prettyAliased (fmap prettyFieldName' aliased)
BigQuery.AggregateProjection aliased ->
prettyAliased (fmap (const "<Aggregate>") aliased)
BigQuery.StarProjection -> "*"
BigQuery.ArrayAggProjection {} -> "<ArrayAgg>"
BigQuery.EntityProjection aliased ->
prettyAliased (fmap (LT.fromText . joinAliasName) aliased)
prettyAliased :: BigQuery.Aliased LT.Builder -> LT.Builder
prettyAliased BigQuery.Aliased {aliasedThing, aliasedAlias} =
aliasedThing <> " as " <> LT.fromText aliasedAlias
prettyFieldName' :: BigQuery.FieldName -> LT.Builder
prettyFieldName' (BigQuery.FieldName {fieldName, fieldNameEntity}) =
LT.fromText (fieldNameEntity <> "." <> fieldName)
printDrawPlannedActions :: [PlannedAction] -> IO ()
printDrawPlannedActions = S8.putStrLn . T.encodeUtf8 . drawPlannedActions
drawActionsForest :: Forest PlannedAction -> Text
drawActionsForest =
T.pack . drawForest .
fmap (fmap (LT.unpack . LT.toLazyText . prettyPlannedAction))
drawActionsForestSQL :: Forest PlannedAction -> Text
drawActionsForestSQL =
T.intercalate ";\n\n" .
mapMaybe (fmap (LT.toStrict . LT.toLazyText) . prettyPlannedActionsSQL) .
foldMap toList
drawPlannedActions :: [PlannedAction] -> Text
drawPlannedActions =
drawActionsForest . actionsForest id
--------------------------------------------------------------------------------
-- Graphing the plan to a forest
actionsForest :: (Graph -> Graph) -> [PlannedAction] -> Forest PlannedAction
actionsForest transform actions =
let (graph, vertex2Node, _key2Vertex) =
graphFromEdges
(map
(\PlannedAction {ref, action} ->
( action
, ref
, map
(\PlannedAction {ref = r} -> r)
(filter (elem ref . plannedActionRefs) actions)))
actions)
in fmap
(fmap
((\(action, ref, _refs) -> PlannedAction {ref, action}) . vertex2Node))
(dff (transform graph))
where
plannedActionRefs PlannedAction {action} =
case action of
SelectAction Select {selectRelationship} ->
case selectRelationship of
Just Relationship {leftRecordSet} -> [leftRecordSet]
Nothing -> mempty
JoinAction Join {leftRecordSet, rightRecordSet} ->
[leftRecordSet, rightRecordSet]
--------------------------------------------------------------------------------
-- Build a query
selectQuery :: Select -> BigQuery.Select
selectQuery Select {..} =
BigQuery.Select
{ selectFor = BigQuery.NoFor
, selectJoins = selectSqlJoins
, selectFinalWantedFields = wantedFields
, ..
}

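For orientation, here is a minimal sketch of how a caller drives this module end to end (the BackendExecute instance later in this diff follows the same shape); planAndDraw is an illustrative name, not part of the commit:

planAndDraw :: BigQuery.Select -> Text
planAndDraw select =
  let -- Plan the head/tail pair, collecting PlannedActions as a side effect.
      (_headAndTail, plannedActions) =
        runPlan (planSelectHeadAndTail Nothing Nothing select)
      -- Arrange the actions into a forest ordered by Ref dependencies.
      forest = actionsForest id plannedActions
   in drawActionsForest forest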
File diff suppressed because it is too large.


@@ -0,0 +1,121 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.BigQuery.Instances.Execute () where
import qualified Data.Aeson as Aeson
import qualified Data.HashMap.Strict.InsOrd as OMap
import qualified Hasura.Backends.BigQuery.DataLoader.Execute as DataLoader
import qualified Hasura.Backends.BigQuery.DataLoader.Plan as DataLoader
import Hasura.EncJSON
import Hasura.Prelude
import qualified Hasura.RQL.Types.Error as RQL
import qualified Hasura.SQL.AnyBackend as AB
import qualified Data.Environment as Env
import qualified Language.GraphQL.Draft.Syntax as G
import qualified Network.HTTP.Client as HTTP
import qualified Network.HTTP.Types as HTTP
import qualified Hasura.Tracing as Tracing
import Hasura.Backends.BigQuery.Plan
import Hasura.GraphQL.Context
import Hasura.GraphQL.Execute.Backend
import Hasura.GraphQL.Parser
import Hasura.RQL.Types
import Hasura.Session
-- MultiplexedQuery
instance BackendExecute 'BigQuery where
type PreparedQuery 'BigQuery = Text
type ExecutionMonad 'BigQuery = Tracing.TraceT (ExceptT QErr IO)
type MultiplexedQuery 'BigQuery = Void
getRemoteJoins = const []
mkDBQueryPlan = msDBQueryPlan
mkDBMutationPlan = msDBMutationPlan
mkDBSubscriptionPlan _ _ _ _ =
throwError $ RQL.internalError "Cannot currently perform subscriptions on BigQuery sources."
-- query
msDBQueryPlan
:: forall m.
( MonadError QErr m
)
=> Env.Environment
-> HTTP.Manager
-> [HTTP.Header]
-> UserInfo
-> [G.Directive G.Name]
-> SourceName
-> SourceConfig 'BigQuery
-> QueryDB 'BigQuery (UnpreparedValue 'BigQuery)
-> m ExecutionStep
msDBQueryPlan _env _manager _reqHeaders userInfo _directives sourceName sourceConfig qrf = do
select <- planNoPlan userInfo qrf
let (!headAndTail, !plannedActionsList) =
DataLoader.runPlan
(DataLoader.planSelectHeadAndTail Nothing Nothing select)
!actionsForest = DataLoader.actionsForest id plannedActionsList
let action = do
result <-
DataLoader.runExecute
sourceConfig
headAndTail
(DataLoader.execute actionsForest)
case result of
Left err -> throw500WithDetail "dataLoader error" $ Aeson.toJSON $ show err
Right recordSet -> pure $! recordSetToEncJSON recordSet
pure
$ ExecStepDB []
. AB.mkAnyBackend
$ DBStepInfo sourceName sourceConfig (Just (DataLoader.drawActionsForest actionsForest)) action
-- | Convert the dataloader's 'RecordSet' type to JSON.
recordSetToEncJSON :: DataLoader.RecordSet -> EncJSON
recordSetToEncJSON DataLoader.RecordSet {rows} =
encJFromList (toList (fmap encJFromRecord rows))
where
encJFromRecord =
encJFromInsOrdHashMap . fmap encJFromOutputValue . OMap.mapKeys coerce
encJFromOutputValue outputValue =
case outputValue of
DataLoader.NullOutputValue -> encJFromJValue Aeson.Null
DataLoader.DecimalOutputValue !i -> encJFromJValue i
DataLoader.BigDecimalOutputValue !i -> encJFromJValue i
DataLoader.FloatOutputValue !i -> encJFromJValue i
DataLoader.TextOutputValue !i -> encJFromJValue i
DataLoader.BytesOutputValue !i -> encJFromJValue i
DataLoader.DateOutputValue !i -> encJFromJValue i
DataLoader.TimestampOutputValue !i -> encJFromJValue i
DataLoader.TimeOutputValue !i -> encJFromJValue i
DataLoader.DatetimeOutputValue !i -> encJFromJValue i
DataLoader.GeographyOutputValue !i -> encJFromJValue i
DataLoader.BoolOutputValue !i -> encJFromJValue i
DataLoader.IntegerOutputValue !i -> encJFromJValue i
DataLoader.ArrayOutputValue !vector ->
encJFromList (toList (fmap encJFromOutputValue vector))
-- Really, the case below shouldn't be happening. But if it
-- does, it's not a problem either. The output will just have
-- a record in it.
DataLoader.RecordOutputValue !record -> encJFromRecord record
-- mutation
msDBMutationPlan
:: forall m.
( MonadError QErr m
)
=> Env.Environment
-> HTTP.Manager
-> [HTTP.Header]
-> UserInfo
-> Bool
-> SourceName
-> SourceConfig 'BigQuery
-> MutationDB 'BigQuery (UnpreparedValue 'BigQuery)
-> m ExecutionStep
msDBMutationPlan _env _manager _reqHeaders _userInfo _stringifyNum _sourceName _sourceConfig _mrf =
throw500 "mutations are not supported in BigQuery; this should be unreachable"

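To illustrate recordSetToEncJSON with hypothetical data: a RecordSet containing one row that maps the field "name" to a TextOutputValue and "age" to an IntegerOutputValue is encoded as

  [{"name": "Alice", "age": 30}]

that is, a JSON array with one object per row, preserving the insertion order of the row's keys; nested ArrayOutputValue and RecordOutputValue values recurse through the same encoder.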

@@ -0,0 +1,25 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
-- |
module Hasura.Backends.BigQuery.Instances.Metadata where
import qualified Hasura.Backends.BigQuery.DDL as BigQuery
import Hasura.RQL.Types.Metadata.Backend
import Hasura.SQL.Backend
instance BackendMetadata 'BigQuery where
buildComputedFieldInfo = BigQuery.buildComputedFieldInfo
buildRemoteFieldInfo = BigQuery.buildRemoteFieldInfo
fetchAndValidateEnumValues = BigQuery.fetchAndValidateEnumValues
resolveSourceConfig = BigQuery.resolveSourceConfig
resolveDatabaseMetadata = BigQuery.resolveSource
createTableEventTrigger = BigQuery.createTableEventTrigger
buildEventTriggerInfo = BigQuery.buildEventTriggerInfo
parseBoolExpOperations = BigQuery.parseBoolExpOperations
buildFunctionInfo = BigQuery.buildFunctionInfo
updateColumnInEventTrigger = BigQuery.updateColumnInEventTrigger
parseCollectableType = BigQuery.parseCollectableType
postDropSourceHook = BigQuery.postDropSourceHook


@@ -0,0 +1,362 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.BigQuery.Instances.Schema () where
import qualified Data.Aeson as J
import qualified Data.HashMap.Strict as Map
import qualified Data.List.NonEmpty as NE
import qualified Data.Text as T
import Data.Text.Extended
import qualified Hasura.Backends.BigQuery.Types as BigQuery
import Hasura.GraphQL.Context
import qualified Hasura.GraphQL.Parser as P
import Hasura.GraphQL.Parser hiding (EnumValueInfo, field)
import Hasura.GraphQL.Parser.Internal.Parser hiding (field)
import Hasura.GraphQL.Schema.Backend
import qualified Hasura.GraphQL.Schema.Build as GSB
import Hasura.GraphQL.Schema.Common
import Hasura.Prelude
import qualified Hasura.RQL.IR.Select as IR
import qualified Hasura.RQL.IR.Update as IR
import Hasura.RQL.Types
import qualified Hasura.RQL.Types.Error as RQL
import qualified Language.GraphQL.Draft.Syntax as G
----------------------------------------------------------------
-- BackendSchema instance
instance BackendSchema 'BigQuery where
-- top level parsers
buildTableQueryFields = GSB.buildTableQueryFields
buildTableRelayQueryFields = msBuildTableRelayQueryFields
buildTableInsertMutationFields = msBuildTableInsertMutationFields
buildTableUpdateMutationFields = msBuildTableUpdateMutationFields
buildTableDeleteMutationFields = msBuildTableDeleteMutationFields
buildFunctionQueryFields = msBuildFunctionQueryFields
buildFunctionRelayQueryFields = msBuildFunctionRelayQueryFields
buildFunctionMutationFields = msBuildFunctionMutationFields
-- backend extensions
relayExtension = const Nothing
nodesAggExtension = const $ Just ()
-- individual components
columnParser = msColumnParser
jsonPathArg = msJsonPathArg
orderByOperators = msOrderByOperators
comparisonExps = msComparisonExps
updateOperators = msUpdateOperators
offsetParser = msOffsetParser
mkCountType = msMkCountType
aggregateOrderByCountType = BigQuery.IntegerScalarType
computedField = msComputedField
node = msNode
tableDistinctOn = msTableDistinctOn
remoteRelationshipField = msRemoteRelationshipField
-- SQL literals
columnDefaultValue = error "TODO: Make impossible by the type system. BigQuery doesn't support insertions."
----------------------------------------------------------------
-- Top level parsers
msBuildTableRelayQueryFields
:: MonadBuildSchema 'BigQuery r m n
=> SourceName
-> SourceConfig 'BigQuery
-> TableName 'BigQuery
-> TableInfo 'BigQuery
-> G.Name
-> NESeq (ColumnInfo 'BigQuery)
-> SelPermInfo 'BigQuery
-> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
msBuildTableRelayQueryFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _pkeyColumns _selPerms =
pure Nothing
msBuildTableInsertMutationFields
:: MonadBuildSchema 'BigQuery r m n
=> SourceName
-> SourceConfig 'BigQuery
-> TableName 'BigQuery
-> TableInfo 'BigQuery
-> G.Name
-> InsPermInfo 'BigQuery
-> Maybe (SelPermInfo 'BigQuery)
-> Maybe (UpdPermInfo 'BigQuery)
-> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildTableInsertMutationFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _insPerms _selPerms _updPerms =
pure []
msBuildTableUpdateMutationFields
:: MonadBuildSchema 'BigQuery r m n
=> SourceName
-> SourceConfig 'BigQuery
-> TableName 'BigQuery
-> TableInfo 'BigQuery
-> G.Name
-> UpdPermInfo 'BigQuery
-> Maybe (SelPermInfo 'BigQuery)
-> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildTableUpdateMutationFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _updPerns _selPerms =
pure []
msBuildTableDeleteMutationFields
:: MonadBuildSchema 'BigQuery r m n
=> SourceName
-> SourceConfig 'BigQuery
-> TableName 'BigQuery
-> TableInfo 'BigQuery
-> G.Name
-> DelPermInfo 'BigQuery
-> Maybe (SelPermInfo 'BigQuery)
-> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildTableDeleteMutationFields _sourceName _sourceInfo _tableName _tableInfo _gqlName _delPerns _selPerms =
pure []
msBuildFunctionQueryFields
:: MonadBuildSchema 'BigQuery r m n
=> SourceName
-> SourceConfig 'BigQuery
-> FunctionName 'BigQuery
-> FunctionInfo 'BigQuery
-> TableName 'BigQuery
-> SelPermInfo 'BigQuery
-> m [FieldParser n (QueryRootField UnpreparedValue)]
msBuildFunctionQueryFields _ _ _ _ _ _ =
pure []
msBuildFunctionRelayQueryFields
:: MonadBuildSchema 'BigQuery r m n
=> SourceName
-> SourceConfig 'BigQuery
-> FunctionName 'BigQuery
-> FunctionInfo 'BigQuery
-> TableName 'BigQuery
-> NESeq (ColumnInfo 'BigQuery)
-> SelPermInfo 'BigQuery
-> m (Maybe (FieldParser n (QueryRootField UnpreparedValue)))
msBuildFunctionRelayQueryFields _sourceName _sourceInfo _functionName _functionInfo _tableName _pkeyColumns _selPerms =
pure Nothing
msBuildFunctionMutationFields
:: MonadBuildSchema 'BigQuery r m n
=> SourceName
-> SourceConfig 'BigQuery
-> FunctionName 'BigQuery
-> FunctionInfo 'BigQuery
-> TableName 'BigQuery
-> SelPermInfo 'BigQuery
-> m [FieldParser n (MutationRootField UnpreparedValue)]
msBuildFunctionMutationFields _ _ _ _ _ _ =
pure []
----------------------------------------------------------------
-- Individual components
msColumnParser
:: (MonadSchema n m, MonadError QErr m)
=> ColumnType 'BigQuery
-> G.Nullability
-> m (Parser 'Both n (Opaque (ColumnValue 'BigQuery)))
msColumnParser columnType (G.Nullability isNullable) =
opaque . fmap (ColumnValue columnType) <$> case columnType of
ColumnScalar scalarType -> case scalarType of
-- bytestrings
-- we only accept string literals
BigQuery.BytesScalarType -> pure $ possiblyNullable scalarType $ BigQuery.StringValue <$> P.string
-- text
BigQuery.StringScalarType -> pure $ possiblyNullable scalarType $ BigQuery.StringValue <$> P.string
-- floating point values
-- TODO: we do not perform size checks here, meaning we would accept an
-- out-of-bounds value as long as it can be represented by a GraphQL float; this
-- will in all likelihood error on the BigQuery side. Do we want to handle those
-- properly here?
BigQuery.FloatScalarType -> pure $ possiblyNullable scalarType $ BigQuery.FloatValue . BigQuery.doubleToFloat64 <$> P.float
-- Int types; we cram everything into Double at the moment
-- TODO: Distinguish between ints and doubles
BigQuery.IntegerScalarType -> pure $ possiblyNullable scalarType $ BigQuery.IntegerValue . BigQuery.intToInt64 . round <$> P.float
BigQuery.DecimalScalarType -> pure $ possiblyNullable scalarType $ BigQuery.DecimalValue . BigQuery.doubleToDecimal <$> P.float
BigQuery.BigDecimalScalarType -> pure $ possiblyNullable scalarType $ BigQuery.BigDecimalValue . BigQuery.doubleToBigDecimal <$> P.float
-- boolean type
BigQuery.BoolScalarType -> pure $ possiblyNullable scalarType $ BigQuery.BoolValue <$> P.boolean
BigQuery.DateScalarType -> pure $ possiblyNullable scalarType $ BigQuery.DateValue . BigQuery.Date <$> P.string
BigQuery.DatetimeScalarType -> pure $ possiblyNullable scalarType $ BigQuery.DatetimeValue . BigQuery.Datetime <$> P.string
BigQuery.GeographyScalarType -> pure $ possiblyNullable scalarType $ BigQuery.GeographyValue . BigQuery.Geography <$> P.string
BigQuery.TimestampScalarType -> pure $ possiblyNullable scalarType $ BigQuery.TimestampValue . BigQuery.Timestamp <$> P.string
ty -> throwError $ RQL.internalError $ T.pack $ "Type currently unsupported for BigQuery: " ++ show ty
ColumnEnumReference (EnumReference tableName enumValues) ->
case nonEmpty (Map.toList enumValues) of
Just enumValuesList -> do
tableGQLName <- tableGraphQLName tableName `onLeft` throwError
let enumName = tableGQLName <> $$(G.litName "_enum")
pure $ possiblyNullable BigQuery.StringScalarType $ P.enum enumName Nothing (mkEnumValue <$> enumValuesList)
Nothing -> throw400 ValidationFailed "empty enum values"
where
-- Sadly, this combinator is not sound in general, so we can't export it
-- for general-purpose use. If we did, someone could write this:
--
--   mkParameter <$> opaque do
--     n <- int
--     pure (mkIntColumnValue (n + 1))
--
-- Now we'd end up with a UVParameter that has a variable in it, so we'd
-- parameterize over it. But when we'd reuse the plan, we wouldn't know to
-- increment the value by 1, so we'd use the wrong value!
--
-- We could theoretically solve this by retaining a reference to the parser
-- itself and re-parsing each new value, using the saved parser, which
-- would admittedly be neat. But it's more complicated, and it isn't clear
-- that it would actually be useful, so for now we don't support it.
opaque :: MonadParse m => Parser 'Both m a -> Parser 'Both m (Opaque a)
opaque parser = parser
{ pParser = \case
P.GraphQLValue (G.VVariable var@Variable{ vInfo, vValue }) -> do
typeCheck False (P.toGraphQLType $ pType parser) var
P.mkOpaque (Just vInfo) <$> pParser parser (absurd <$> vValue)
value -> P.mkOpaque Nothing <$> pParser parser value
}
possiblyNullable _scalarType
| isNullable = fmap (fromMaybe BigQuery.NullValue) . P.nullable
| otherwise = id
mkEnumValue :: (EnumValue, EnumValueInfo) -> (P.Definition P.EnumValueInfo, ScalarValue 'BigQuery)
mkEnumValue (EnumValue value, EnumValueInfo description) =
( P.mkDefinition value (G.Description <$> description) P.EnumValueInfo
, BigQuery.StringValue $ G.unName value
)
throughJSON scalarName =
let schemaType = P.NonNullable $ P.TNamed $ P.mkDefinition scalarName Nothing P.TIScalar
in Parser
{ pType = schemaType
, pParser =
valueToJSON (P.toGraphQLType schemaType) >=>
either (parseErrorWith ParseFailed . qeError) pure . runAesonParser J.parseJSON
}
msJsonPathArg
:: MonadParse n
=> ColumnType 'BigQuery
-> InputFieldsParser n (Maybe (IR.ColumnOp 'BigQuery))
msJsonPathArg _columnType = pure Nothing
msOrderByOperators
:: NonEmpty
( Definition P.EnumValueInfo
, (BasicOrderType 'BigQuery, NullsOrderType 'BigQuery)
)
msOrderByOperators = NE.fromList
[ ( define $$(G.litName "asc") "in ascending order, nulls first"
, (BigQuery.AscOrder, BigQuery.NullsFirst)
)
, ( define $$(G.litName "asc_nulls_first") "in ascending order, nulls first"
, (BigQuery.AscOrder, BigQuery.NullsFirst)
)
, ( define $$(G.litName "asc_nulls_last") "in ascending order, nulls last"
, (BigQuery.AscOrder, BigQuery.NullsLast)
)
, ( define $$(G.litName "desc") "in descending order, nulls last"
, (BigQuery.DescOrder, BigQuery.NullsLast)
)
, ( define $$(G.litName "desc_nulls_first") "in descending order, nulls first"
, (BigQuery.DescOrder, BigQuery.NullsFirst)
)
, ( define $$(G.litName "desc_nulls_last") "in descending order, nulls last"
, (BigQuery.DescOrder, BigQuery.NullsLast)
)
]
where
define name desc = P.mkDefinition name (Just desc) P.EnumValueInfo
msComparisonExps
:: forall m n
. (BackendSchema 'BigQuery, MonadSchema n m, MonadError QErr m)
=> ColumnType 'BigQuery
-> m (Parser 'Input n [ComparisonExp 'BigQuery])
msComparisonExps = P.memoize 'comparisonExps $ \columnType -> do
-- see Note [Columns in comparison expression are never nullable]
typedParser <- columnParser columnType (G.Nullability False)
nullableTextParser <- columnParser (ColumnScalar BigQuery.StringScalarType) (G.Nullability True)
textParser <- columnParser (ColumnScalar BigQuery.StringScalarType) (G.Nullability False)
let name = P.getName typedParser <> $$(G.litName "_BigQuery_comparison_exp")
desc = G.Description $ "Boolean expression to compare columns of type "
<> P.getName typedParser
<<> ". All fields are combined with logical 'AND'."
textListParser = P.list textParser `P.bind` traverse P.openOpaque
columnListParser = P.list typedParser `P.bind` traverse P.openOpaque
pure $ P.object name (Just desc) $ catMaybes <$> sequenceA
[ P.fieldOptional $$(G.litName "_is_null") Nothing (bool ANISNOTNULL ANISNULL <$> P.boolean)
, P.fieldOptional $$(G.litName "_eq") Nothing (AEQ True . mkParameter <$> typedParser)
, P.fieldOptional $$(G.litName "_neq") Nothing (ANE True . mkParameter <$> typedParser)
, P.fieldOptional $$(G.litName "_gt") Nothing (AGT . mkParameter <$> typedParser)
, P.fieldOptional $$(G.litName "_lt") Nothing (ALT . mkParameter <$> typedParser)
, P.fieldOptional $$(G.litName "_gte") Nothing (AGTE . mkParameter <$> typedParser)
, P.fieldOptional $$(G.litName "_lte") Nothing (ALTE . mkParameter <$> typedParser)
]
msOffsetParser :: MonadParse n => Parser 'Both n (SQLExpression 'BigQuery)
msOffsetParser =
BigQuery.ValueExpression . BigQuery.IntegerValue . BigQuery.intToInt64 . fromIntegral <$>
P.int
msMkCountType
:: Maybe Bool
-- ^ distinct values
-> Maybe [Column 'BigQuery]
-> CountType 'BigQuery
msMkCountType _ Nothing = BigQuery.StarCountable
msMkCountType (Just True) (Just cols) =
maybe BigQuery.StarCountable BigQuery.DistinctCountable $ nonEmpty cols
msMkCountType _ (Just cols) =
maybe BigQuery.StarCountable BigQuery.NonNullFieldCountable $ nonEmpty cols
-- | Argument to distinct select on columns returned from table selection
-- > distinct_on: [table_select_column!]
msTableDistinctOn
-- :: forall m n. (BackendSchema 'BigQuery, MonadSchema n m, MonadTableInfo r m, MonadRole r m)
:: Applicative m
=> Applicative n
=> TableName 'BigQuery
-> SelPermInfo 'BigQuery
-> m (InputFieldsParser n (Maybe (XDistinct 'BigQuery, NonEmpty (Column 'BigQuery))))
msTableDistinctOn _table _selectPermissions = pure (pure Nothing)
-- | Various update operators
msUpdateOperators
-- :: forall m n r. (MonadSchema n m, MonadTableInfo r m)
:: Applicative m
=> TableName 'BigQuery -- ^ qualified name of the table
-> UpdPermInfo 'BigQuery -- ^ update permissions of the table
-> m (Maybe (InputFieldsParser n [(Column 'BigQuery, IR.UpdOpExpG (UnpreparedValue 'BigQuery))]))
msUpdateOperators _table _updatePermissions = pure Nothing
-- | Computed field parser.
-- Currently unsupported: returns Nothing for now.
msComputedField
:: MonadBuildSchema 'BigQuery r m n
=> ComputedFieldInfo 'BigQuery
-> SelPermInfo 'BigQuery
-> m (Maybe (FieldParser n (AnnotatedField 'BigQuery)))
msComputedField _fieldInfo _selectPermissions = pure Nothing
-- | Remote join field parser.
-- Currently unsupported: returns Nothing for now.
msRemoteRelationshipField
:: MonadBuildSchema 'BigQuery r m n
=> RemoteFieldInfo 'BigQuery
-> m (Maybe [FieldParser n (AnnotatedField 'BigQuery)])
msRemoteRelationshipField _remoteFieldInfo = pure Nothing
-- | The 'node' root field of a Relay request. Relay is currently unsupported on BigQuery,
-- meaning this parser will never be called: any attempt to create this parser should
-- therefore fail.
msNode
:: MonadBuildSchema 'BigQuery r m n
=> m ( Parser 'Output n
( HashMap
( TableName 'BigQuery)
( SourceName, SourceConfig 'BigQuery
, SelPermInfo 'BigQuery
, PrimaryKeyColumns 'BigQuery
, AnnotatedFields 'BigQuery
)
)
)
msNode = throw500 "BigQuery does not support relay; `node` should never be exposed in the schema."

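For a concrete sense of what msComparisonExps builds (a sketch; the type name is derived from the column parser's name plus the "_BigQuery_comparison_exp" suffix above, so the exact spelling is an assumption): for a String column the resulting GraphQL input object looks like

  input String_BigQuery_comparison_exp {
    _is_null: Boolean
    _eq: String
    _neq: String
    _gt: String
    _lt: String
    _gte: String
    _lte: String
  }

with all supplied fields combined by logical AND, as the description string states.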

@@ -0,0 +1,81 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.BigQuery.Instances.Transport () where
import qualified Data.Aeson as J
import Hasura.Backends.BigQuery.Instances.Execute ()
import Hasura.Backends.MSSQL.Instances.Execute ()
import Hasura.EncJSON
import Hasura.GraphQL.Logging (MonadQueryLog (..), QueryLog(..), GeneratedQuery(..))
import Hasura.GraphQL.Transport.Backend
import Hasura.GraphQL.Transport.HTTP.Protocol
import qualified Hasura.Logging as L
import Hasura.Prelude
import Hasura.RQL.Types
import Hasura.Server.Types (RequestId)
import Hasura.Session
import Hasura.Tracing
import qualified Hasura.Tracing as Tracing
import qualified Language.GraphQL.Draft.Syntax as G
instance BackendTransport 'BigQuery where
runDBQuery = runQuery
runDBMutation = runMutation
runDBSubscription = error "Not supported."
runQuery
:: ( MonadIO m
, MonadQueryLog m
, MonadTrace m
, MonadError QErr m
)
=> RequestId
-> GQLReqUnparsed
-> G.Name
-> UserInfo
-> L.Logger L.Hasura
-> SourceConfig 'BigQuery
-> Tracing.TraceT (ExceptT QErr IO) EncJSON
-> Maybe Text
-> m (DiffTime, EncJSON)
-- ^ Also return the time spent in the database query; for telemetry.
runQuery reqId query fieldName _userInfo logger _sourceConfig tx genSql = do
-- log the generated SQL and the graphql query
-- FIXME: fix logging by making logQueryLog expect something backend agnostic!
logQueryLog logger $ mkQueryLog query fieldName genSql reqId
withElapsedTime $
flip Tracing.interpTraceT tx $ \m -> run m
run :: (MonadIO m, MonadError QErr m) => ExceptT QErr IO a -> m a
run action = do
result <- liftIO $ runExceptT action
result `onLeft` throwError
runMutation
:: ( MonadError QErr m
)
=> RequestId
-> GQLReqUnparsed
-> G.Name
-> UserInfo
-> L.Logger L.Hasura
-> SourceConfig 'BigQuery
-> Tracing.TraceT (ExceptT QErr IO) EncJSON
-> Maybe Text
-> m (DiffTime, EncJSON)
-- ^ Also return 'Mutation' when the operation was a mutation, and the time
-- spent in the database query; for telemetry.
runMutation _reqId _query _fieldName _userInfo _logger _sourceConfig _tx _genSql = -- do
throw500 "BigQuery does not support mutations!"
mkQueryLog
:: GQLReqUnparsed
-> G.Name
-> Maybe Text
-> RequestId
-> QueryLog
mkQueryLog gqlQuery fieldName preparedSql requestId =
QueryLog gqlQuery ((fieldName,) <$> generatedQuery) requestId
where
generatedQuery = preparedSql <&> \qs -> GeneratedQuery qs J.Null
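-- A minimal illustration (hypothetical SQL text): given @Just "SELECT ..."@ as
-- the prepared SQL, the log entry pairs the field name with
-- @GeneratedQuery "SELECT ..." J.Null@; given @Nothing@, no generated-query
-- entry is attached to the log.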

View File

@ -0,0 +1,86 @@
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.BigQuery.Instances.Types where
import Data.Coerce
import Data.Functor.Const
import Hasura.Prelude
import Hasura.SQL.Types
import qualified Text.Builder as TB
import qualified Language.GraphQL.Draft.Syntax as G
import Data.Aeson
import qualified Hasura.Backends.BigQuery.Types as BigQuery
import qualified Hasura.Backends.BigQuery.Source as BigQuery
import qualified Hasura.Backends.BigQuery.ToQuery as BigQuery (toTextPretty, fromExpression)
import Hasura.RQL.DDL.Headers ()
import Hasura.RQL.Types.Backend
import Hasura.RQL.Types.Error
import Hasura.SQL.Backend
instance ToSQL BigQuery.Expression where
toSQL = TB.text . BigQuery.toTextPretty . BigQuery.fromExpression
instance Backend 'BigQuery where
type SourceConfig 'BigQuery = BigQuery.BigQuerySourceConfig
type SourceConnConfiguration 'BigQuery = BigQuery.BigQueryConnSourceConfig
type Identifier 'BigQuery = Void
type Alias 'BigQuery = BigQuery.EntityAlias
type TableName 'BigQuery = BigQuery.TableName
type FunctionName 'BigQuery = BigQuery.FunctionName
type FunctionArgType 'BigQuery = Void
type ConstraintName 'BigQuery = Void
type BasicOrderType 'BigQuery = BigQuery.Order
type NullsOrderType 'BigQuery = BigQuery.NullsOrder
type CountType 'BigQuery = BigQuery.Countable BigQuery.ColumnName
type Column 'BigQuery = BigQuery.ColumnName
type ScalarValue 'BigQuery = BigQuery.Value
type ScalarType 'BigQuery = BigQuery.ScalarType
type SQLExpression 'BigQuery = BigQuery.Expression
type SQLOperator 'BigQuery = BigQuery.Op
type BooleanOperators 'BigQuery = Const Void
type XComputedField 'BigQuery = Void
type XRemoteField 'BigQuery = Void
type XRelay 'BigQuery = Void
type XNodesAgg 'BigQuery = XEnable
type XDistinct 'BigQuery = Void
functionArgScalarType :: FunctionArgType 'BigQuery -> ScalarType 'BigQuery
functionArgScalarType = absurd
isComparableType :: ScalarType 'BigQuery -> Bool
isComparableType = BigQuery.isComparableType
isNumType :: ScalarType 'BigQuery -> Bool
isNumType = BigQuery.isNumType
textToScalarValue :: Maybe Text -> ScalarValue 'BigQuery
textToScalarValue = maybe BigQuery.NullValue BigQuery.StringValue
parseScalarValue :: ScalarType 'BigQuery -> Value -> Either QErr (ScalarValue 'BigQuery)
parseScalarValue = BigQuery.parseScalarValue
scalarValueToJSON :: ScalarValue 'BigQuery -> Value
scalarValueToJSON = error "scalarValueToJSON"
functionToTable :: FunctionName 'BigQuery -> TableName 'BigQuery
functionToTable = error "functionToTable"
tableToFunction :: TableName 'BigQuery -> FunctionName 'BigQuery
tableToFunction = coerce . BigQuery.tableName
tableGraphQLName :: TableName 'BigQuery -> Either QErr G.Name
tableGraphQLName = BigQuery.getGQLTableName
functionGraphQLName :: FunctionName 'BigQuery -> Either QErr G.Name
functionGraphQLName = error "functionGraphQLName"
scalarTypeGraphQLName :: ScalarType 'BigQuery -> Either QErr G.Name
scalarTypeGraphQLName = error "scalarTypeGraphQLName"
snakeCaseTableName :: TableName 'BigQuery -> Text
snakeCaseTableName = error "snakeCaseTableName"

View File

@ -0,0 +1,265 @@
{-# LANGUAGE DuplicateRecordFields #-}
-- | Fetching dataset and table metadata from BigQuery's REST API.
module Hasura.Backends.BigQuery.Meta
( MetadataError(..)
, getTables
, RestTableReference(..)
, RestTable(..)
, RestTableSchema(..)
, RestFieldSchema(..)
, RestType(..)
, Mode(..)
) where
import Control.Exception.Safe
import Control.Monad.IO.Class
import Control.Monad.Trans.Except
import Data.Aeson
import qualified Data.Aeson as Aeson
import Data.Foldable
import Data.Maybe
import qualified Data.Sequence as Seq
import Data.Text (Text)
import qualified Data.Text as T
import GHC.Generics
import Hasura.Backends.BigQuery.Connection
import Hasura.Backends.BigQuery.Source
import Network.HTTP.Simple
import Network.HTTP.Types
import Prelude
--------------------------------------------------------------------------------
-- Types
data MetadataError
= RestProblem RestProblem
deriving (Show)
data RestProblem
= GetTablesProblem SomeException
| GetTableProblem SomeException
| GetMetaDecodeProblem String
| GetTablesBigQueryProblem BigQueryProblem
| RESTRequestNonOK Status
deriving (Show)
data RestTableList = RestTableList
{ nextPageToken :: Maybe Text
, tables :: [RestTableBrief]
} deriving (Show)
instance FromJSON RestTableList where
parseJSON =
withObject
"RestTableList"
(\o -> do
kind <- o .: "kind"
case kind of
("bigquery#tableList"::Text) -> do
nextPageToken <- o .:? "nextPageToken"
tables <- o .: "tables"
pure RestTableList {..}
_ -> fail "Expected kind of bigquery#tableList")
data RestTableBrief = RestTableBrief
{ tableReference :: RestTableReference
} deriving (Show, Generic)
instance FromJSON RestTableBrief
data RestTableReference = RestTableReference
{ datasetId :: Text
, projectId :: Text
, tableId :: Text
} deriving (Show, Generic)
instance FromJSON RestTableReference
data RestTable = RestTable
{ tableReference :: RestTableReference
, schema :: RestTableSchema
} deriving (Show, Generic)
instance FromJSON RestTable
data RestTableSchema = RestTableSchema
{ fields :: [RestFieldSchema]
} deriving (Show, Generic)
instance FromJSON RestTableSchema
data RestFieldSchema = RestFieldSchema
{ name :: Text
, type' :: RestType
-- ^ The field data type. Possible values include STRING, BYTES,
-- INTEGER, INT64 (same as INTEGER), FLOAT, FLOAT64 (same as
-- FLOAT), BOOLEAN, BOOL (same as BOOLEAN), TIMESTAMP, DATE, TIME,
-- DATETIME, GEOGRAPHY, NUMERIC, RECORD (where RECORD indicates
-- that the field contains a nested schema) or STRUCT (same as
-- RECORD).
, mode :: Mode
-- ^ The field mode. Possible values include NULLABLE, REQUIRED and
-- REPEATED. The default value is NULLABLE.
} deriving (Show, Generic)
instance FromJSON RestFieldSchema where
parseJSON =
withObject
"RestFieldSchema"
(\o -> do
type' <- o .: "type"
name <- o .: "name"
mode <- fmap (fromMaybe Nullable) (o .:? "mode")
pure RestFieldSchema {..})
data Mode = Nullable | Required | Repeated deriving (Show)
instance FromJSON Mode where
parseJSON j = do
s <- parseJSON j
case s :: Text of
"NULLABLE" -> pure Nullable
"REQUIRED" -> pure Required
"REPEATED" -> pure Repeated
_ -> fail ("invalid mode " ++ show s)
data RestType
= STRING
| BYTES
| INTEGER
| FLOAT
| BOOL
| TIMESTAMP
| DATE
| TIME
| DATETIME
| GEOGRAPHY
| DECIMAL
| BIGDECIMAL
| STRUCT -- (same as RECORD).
deriving (Show)
instance FromJSON RestType where
parseJSON j = do
s <- parseJSON j
case s :: Text of
"STRING" -> pure STRING
"BYTES" -> pure BYTES
"INTEGER" -> pure INTEGER
"INT64" -> pure INTEGER
"FLOAT" -> pure FLOAT
"FLOAT64" -> pure FLOAT
"BOOLEAN" -> pure BOOL
"BOOL" -> pure BOOL
"TIMESTAMP" -> pure TIMESTAMP
"DATE" -> pure DATE
"TIME" -> pure TIME
"DATETIME" -> pure DATETIME
"GEOGRAPHY" -> pure GEOGRAPHY
"NUMERIC" -> pure DECIMAL
"DECIMAL" -> pure DECIMAL
"BIGNUMERIC" -> pure BIGDECIMAL
"BIGDECIMAL" -> pure BIGDECIMAL
"RECORD" -> pure STRUCT
"STRUCT" -> pure STRUCT
_ -> fail ("invalid type " ++ show s)
--------------------------------------------------------------------------------
-- REST request
-- | Get all tables from all specified data sets.
getTables ::
MonadIO m
=> BigQuerySourceConfig
-> m (Either RestProblem [RestTable])
getTables sc@BigQuerySourceConfig {..} =
runExceptT
(fmap concat (traverse (ExceptT . getTablesForDataSet sc) _scDatasets))
-- | Get tables in the dataset.
getTablesForDataSet ::
MonadIO m
=> BigQuerySourceConfig
-> Text
-> m (Either RestProblem [RestTable])
getTablesForDataSet sc@BigQuerySourceConfig{..} dataSet = do
result <-
liftIO (catchAny (run Nothing mempty) (pure . Left . GetTablesProblem))
case result of
Left e -> pure (Left e)
Right briefs ->
fmap
sequence
(traverse
(\RestTableBrief {tableReference = RestTableReference {tableId}} ->
getTable sc dataSet tableId)
briefs)
where
run pageToken acc = do
let req = setRequestHeader "Content-Type" ["application/json"]
$ parseRequest_ url
eResp <- runBigQuery sc req
case eResp of
Left e -> pure (Left (GetTablesBigQueryProblem e))
Right resp ->
case getResponseStatusCode resp of
200 ->
case Aeson.eitherDecode (getResponseBody resp) of
Left e -> pure (Left (GetMetaDecodeProblem e))
Right RestTableList {nextPageToken, tables} ->
case nextPageToken of
Nothing -> pure (Right (toList (acc <> Seq.fromList tables)))
Just token -> run (pure token) (acc <> Seq.fromList tables)
_ -> pure (Left (RESTRequestNonOK (getResponseStatus resp)))
where
url =
"GET https://bigquery.googleapis.com/bigquery/v2/projects/" <>
T.unpack _scProjectId <>
"/datasets/" <>
T.unpack dataSet <>
"/tables?alt=json&key=" <>
-- T.unpack apiToken <>
"&" <>
T.unpack (encodeParams extraParameters)
extraParameters = pageTokenParam
where
pageTokenParam =
case pageToken of
Nothing -> []
Just token -> [("pageToken", token)]
-- | Get a single table in the dataset.
getTable ::
MonadIO m
=> BigQuerySourceConfig
-> Text -> Text
-> m (Either RestProblem RestTable)
getTable sc@BigQuerySourceConfig {..} dataSet tableId = do
liftIO (catchAny run (pure . Left . GetTableProblem))
where
run = do
let req =
setRequestHeader "Content-Type" ["application/json"] $
parseRequest_ url
eResp <- runBigQuery sc req
case eResp of
Left e -> pure (Left (GetTablesBigQueryProblem e))
Right resp ->
case getResponseStatusCode resp of
200 ->
case Aeson.eitherDecode (getResponseBody resp) of
Left e -> pure (Left (GetMetaDecodeProblem e))
Right table -> pure (Right table)
_ -> pure (Left (RESTRequestNonOK (getResponseStatus resp)))
where
url =
"GET https://bigquery.googleapis.com/bigquery/v2/projects/" <>
T.unpack _scProjectId <>
"/datasets/" <>
T.unpack dataSet <>
"/tables/" <>
T.unpack tableId <>
"?alt=json&key=" <>
-- T.unpack apiToken <>
"&" <>
T.unpack (encodeParams extraParameters)
extraParameters = []
encodeParams :: [(Text, Text)] -> Text
encodeParams = T.intercalate "&" . map (\(k, v) -> k <> "=" <> v)
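-- A quick sketch of 'encodeParams' (hypothetical values):
--
-- >>> encodeParams [("pageToken", "abc123"), ("maxResults", "50")]
-- "pageToken=abc123&maxResults=50"
--
-- Note that keys and values are not URL-escaped here; callers are expected to
-- pass URL-safe text.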

View File

@ -0,0 +1,93 @@
-- | Planning BigQuery queries and subscriptions.
module Hasura.Backends.BigQuery.Plan
( planNoPlan
, planToForest
) where
import Data.Aeson.Text
import qualified Data.Text.Lazy as LT
import Data.Tree
import qualified Hasura.Backends.BigQuery.DataLoader.Plan as DataLoader
import Hasura.Prelude hiding (first)
import Hasura.SQL.Types
import Hasura.Session
import Control.Monad.Validate
import Data.Text.Extended
import qualified Hasura.GraphQL.Parser as GraphQL
import qualified Hasura.RQL.Types.Column as RQL
import Hasura.Backends.BigQuery.FromIr as BigQuery
import Hasura.Backends.BigQuery.Types as BigQuery
import Hasura.GraphQL.Context
import qualified Hasura.RQL.Types.Error as RQL
import Hasura.SQL.Backend
-- --------------------------------------------------------------------------------
-- -- Top-level planner
planToForest ::
MonadError RQL.QErr m
=> UserInfo
-> QueryDB 'BigQuery (GraphQL.UnpreparedValue 'BigQuery)
-> m (Forest DataLoader.PlannedAction)
planToForest userInfo qrf = do
select <- planNoPlan userInfo qrf
let (!_headAndTail, !plannedActionsList) =
DataLoader.runPlan
(DataLoader.planSelectHeadAndTail Nothing Nothing select)
!actionsForest = DataLoader.actionsForest id plannedActionsList
pure actionsForest
planNoPlan ::
MonadError RQL.QErr m
=> UserInfo
-> QueryDB 'BigQuery (GraphQL.UnpreparedValue 'BigQuery)
-> m Select
planNoPlan userInfo queryDB = do
rootField <- traverseQueryDB (prepareValueNoPlan (_uiSession userInfo)) queryDB
select <-
runValidate (BigQuery.runFromIr (BigQuery.fromRootField rootField))
`onLeft` (RQL.throw400 RQL.NotSupported . (tshow :: NonEmpty Error -> Text))
pure
select
{ selectFor =
case selectFor select of
NoFor -> NoFor
JsonFor forJson -> JsonFor forJson {jsonRoot = Root "root"}
}
--------------------------------------------------------------------------------
-- Resolving values
-- | Prepare a value without any query planning; we just execute the
-- query with the values embedded.
prepareValueNoPlan ::
(MonadError RQL.QErr m)
=> SessionVariables
-> GraphQL.UnpreparedValue 'BigQuery
-> m BigQuery.Expression
prepareValueNoPlan sessionVariables =
\case
GraphQL.UVLiteral x -> pure x
GraphQL.UVSession -> pure globalSessionExpression
-- Note: it is not yet certain that JSON_VALUE is the right operator to use here.
GraphQL.UVSessionVar typ text ->
case typ of
CollectableTypeScalar scalarType ->
pure
(CastExpression
(JsonValueExpression
globalSessionExpression
(FieldPath RootPath (toTxt text)))
scalarType)
CollectableTypeArray {} ->
throwError $ RQL.internalError "Cannot currently prepare array types in BigQuery."
GraphQL.UVParameter _ RQL.ColumnValue {..} -> pure (ValueExpression cvValue)
where
globalSessionExpression =
ValueExpression
(StringValue (LT.toStrict (encodeToLazyText sessionVariables)))
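-- For illustration (assuming a session variable @x-hasura-user-id@ of scalar
-- type), 'prepareValueNoPlan' embeds the whole session as a JSON string and
-- extracts the variable with JSON_VALUE, producing roughly:
--
--   CastExpression
--     (JsonValueExpression globalSessionExpression
--        (FieldPath RootPath "x-hasura-user-id"))
--     scalarType
--
-- which renders as @CAST(JSON_VALUE(<session-json>, '$.x-hasura-user-id') AS <type>)@,
-- with both the session JSON and the path passed as query parameters.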

View File

@ -0,0 +1,206 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE NoGeneralisedNewtypeDeriving #-}
{-# OPTIONS_GHC -fno-warn-orphans #-}
module Hasura.Backends.BigQuery.Source where
import Control.Concurrent.MVar
import Control.DeepSeq
import qualified Crypto.PubKey.RSA.Types as Cry
import qualified Data.Aeson as J
import qualified Data.Aeson.Casing as J
import qualified Data.Aeson.TH as J
import qualified Data.ByteString.Lazy as BL
import Data.Hashable (hashWithSalt)
import qualified Data.HashMap.Strict as HM
import Data.Text (pack)
import qualified Data.Text.Encoding as TE
import qualified Data.X509 as X509
import qualified Data.X509.Memory as X509
import Hasura.Incremental (Cacheable (..))
import Hasura.Prelude
import qualified System.Environment as SE (getEnv)
import System.FilePath (isRelative)
import System.IO.Unsafe (unsafePerformIO)
data PKey = PKey
{ unPKey :: Cry.PrivateKey
, originalBS :: Text
}
deriving (Show, Eq, Data, Generic, NFData, Hashable)
deriving instance Generic Cry.PrivateKey -- orphan
deriving instance Generic Cry.PublicKey -- orphan
deriving instance J.ToJSON Cry.PrivateKey -- orphan
deriving instance J.ToJSON Cry.PublicKey -- orphan
deriving instance Hashable Cry.PrivateKey -- orphan
deriving instance Hashable Cry.PublicKey -- orphan
instance Arbitrary Cry.PrivateKey where -- orphan
arbitrary = genericArbitrary
instance Arbitrary Cry.PublicKey where -- orphan
arbitrary = genericArbitrary
instance Arbitrary PKey where
arbitrary = genericArbitrary
instance J.FromJSON PKey where
parseJSON = J.withText "private_key" $ \k ->
case X509.readKeyFileFromMemory $ TE.encodeUtf8 k of
[X509.PrivKeyRSA k'] -> return $ PKey k' k
_ -> fail "unable to parse private key"
instance J.ToJSON PKey where
toJSON PKey{..} = J.toJSON originalBS
newtype GoogleAccessToken
= GoogleAccessToken Text
deriving (Show, Eq, J.FromJSON, J.ToJSON, Hashable, Generic, Data, NFData)
instance Arbitrary GoogleAccessToken where
arbitrary = genericArbitrary
data TokenResp
= TokenResp
{ _trAccessToken :: !GoogleAccessToken
, _trExpiresAt :: !Integer -- Number of seconds until expiry, to which we add the current time in seconds so the field tracks an absolute expiry
} deriving (Eq, Show, Data, NFData, Generic, Hashable)
instance J.FromJSON TokenResp where
parseJSON = J.withObject "TokenResp" $ \o -> TokenResp
<$> o J..: "access_token"
<*> o J..: "expires_in"
instance Arbitrary TokenResp where
arbitrary = genericArbitrary
dummyTokenResp :: TokenResp
dummyTokenResp = TokenResp (GoogleAccessToken "DUMMY") 0
data ServiceAccount
= ServiceAccount
{ _saClientEmail :: !Text
, _saPrivateKey :: !PKey
, _saProjectId :: !Text
} deriving (Eq, Show, Data, NFData, Generic, Hashable)
$(J.deriveJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=False} ''ServiceAccount)
instance Arbitrary ServiceAccount where
arbitrary = genericArbitrary
data ConfigurationJSON a
= FromEnvJSON Text
| FromYamlJSON a
deriving stock (Show, Eq, Generic)
deriving (NFData, Hashable)
instance Arbitrary a => Arbitrary (ConfigurationJSON a) where
arbitrary = genericArbitrary
instance J.FromJSON a => J.FromJSON (ConfigurationJSON a) where
parseJSON = \case
J.Object o | Just (J.String text) <- HM.lookup "from_env" o -> pure (FromEnvJSON text)
J.String s -> case J.eitherDecode . BL.fromStrict . TE.encodeUtf8 $ s of
Left {} -> fail "error parsing configuration json"
Right sa -> pure sa
j -> fmap FromYamlJSON (J.parseJSON j)
instance J.ToJSON a => J.ToJSON (ConfigurationJSON a) where
toJSON = \case
FromEnvJSON i -> J.object ["from_env" J..= i]
FromYamlJSON j -> J.toJSON j
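-- For illustration, both of these JSON shapes are accepted (hypothetical values):
--
--   {"from_env": "SERVICE_ACCOUNT_JSON"}   parses as   FromEnvJSON "SERVICE_ACCOUNT_JSON"
--   {"client_email": ..., ...}             parses as   FromYamlJSON <service account>
--
-- and a JSON /string/ whose contents are themselves JSON is decoded
-- recursively through this same instance.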
-- | Configuration inputs when they are a YAML array or an Env var whose value is
-- a comma-separated string
data ConfigurationInputs
= FromYamls ![Text]
| FromEnvs !Text
deriving stock (Show, Eq, Generic)
deriving (NFData, Hashable)
instance Arbitrary ConfigurationInputs where
arbitrary = genericArbitrary
instance J.ToJSON ConfigurationInputs where
toJSON = \case
FromYamls i -> J.toJSON i
FromEnvs i -> J.object ["from_env" J..= i]
instance J.FromJSON ConfigurationInputs where
parseJSON = \case
J.Object o -> FromEnvs <$> o J..: "from_env"
s@(J.Array _) -> FromYamls <$> J.parseJSON s
_ -> fail "one of array or object must be provided"
-- | Configuration input when both the YAML value and the Env var hold
-- singular values
data ConfigurationInput
= FromYaml !Text
| FromEnv !Text
deriving stock (Show, Eq, Generic)
deriving (NFData, Hashable)
instance Arbitrary ConfigurationInput where
arbitrary = genericArbitrary
instance J.ToJSON ConfigurationInput where
toJSON = \case
FromYaml i -> J.toJSON i
FromEnv i -> J.object ["from_env" J..= i]
instance J.FromJSON ConfigurationInput where
parseJSON = \case
J.Object o -> FromEnv <$> o J..: "from_env"
s@(J.String _) -> FromYaml <$> J.parseJSON s
_ -> fail "one of string or object must be provided"
data BigQueryConnSourceConfig
= BigQueryConnSourceConfig
{ _cscServiceAccount :: !(ConfigurationJSON ServiceAccount)
, _cscDatasets :: !ConfigurationInputs
, _cscProjectId :: !ConfigurationInput -- this is part of service-account.json, but we put it here on purpose
} deriving (Eq, Generic, NFData)
$(J.deriveJSON (J.aesonDrop 4 J.snakeCase){J.omitNothingFields=True} ''BigQueryConnSourceConfig)
deriving instance Show BigQueryConnSourceConfig
deriving instance Hashable BigQueryConnSourceConfig
instance Arbitrary BigQueryConnSourceConfig where
arbitrary = genericArbitrary
instance Cacheable BigQueryConnSourceConfig where
unchanged _ = (==)
data BigQuerySourceConfig
= BigQuerySourceConfig
{ _scServiceAccount :: !ServiceAccount
, _scDatasets :: ![Text]
, _scProjectId :: !Text -- this is part of service-account.json, but we put it here on purpose
, _scAccessTokenMVar :: !(MVar (Maybe TokenResp))
} deriving (Eq, Generic, NFData)
$(J.deriveJSON (J.aesonDrop 3 J.snakeCase){J.omitNothingFields=True} ''BigQuerySourceConfig)
deriving instance Show BigQuerySourceConfig
deriving instance Hashable BigQuerySourceConfig
instance Arbitrary BigQuerySourceConfig where
arbitrary = genericArbitrary
instance Cacheable BigQuerySourceConfig where
unchanged _ = (==)
instance Show (MVar (Maybe TokenResp)) where
-- show = maybe "NOTHING" (const "_REDACTED_") . unsafePerformIO . readIORef
show = const "_REDACTED_"
instance J.FromJSON (MVar (Maybe TokenResp)) where
parseJSON _ = pure $ unsafePerformIO $ newEmptyMVar
instance J.ToJSON (MVar (Maybe TokenResp)) where
toJSON _ = J.String "_REDACTED_"
instance Hashable (MVar (Maybe TokenResp)) where
hashWithSalt i r = hashWithSalt i (unsafePerformIO $ readMVar r)
instance Arbitrary (MVar (Maybe TokenResp)) where
arbitrary = genericArbitrary @(Maybe TokenResp) <&> (unsafePerformIO . newMVar)
-- | for testing
getBigQuerySourceConfigEnv :: IO BigQuerySourceConfig
getBigQuerySourceConfigEnv = do
_scServiceAccountFilePath <- getEnvUnline _safpEnvKey
if isRelative _scServiceAccountFilePath
then error $ _safpEnvKey <> " needs to be an absolute file-path"
else do
_scDatasets <- pure . pack <$> getEnvUnline "HASURA_BIGQUERY_DATASET"
_scProjectId <- pack <$> getEnvUnline "HASURA_BIGQUERY_PROJECT_ID"
_scServiceAccount :: ServiceAccount <- either error id . J.eitherDecode' <$> BL.readFile _scServiceAccountFilePath
_scAccessTokenMVar <- newMVar Nothing
pure BigQuerySourceConfig {..}
where
_safpEnvKey = "HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE_PATH"
getEnvUnline key = fmap (concat . take 1 . lines) (SE.getEnv key)
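-- A typical test invocation would export something like (hypothetical values):
--
--   HASURA_BIGQUERY_SERVICE_ACCOUNT_FILE_PATH=/secrets/service-account.json
--   HASURA_BIGQUERY_DATASET=chinook
--   HASURA_BIGQUERY_PROJECT_ID=my-gcp-project
--
-- before calling 'getBigQuerySourceConfigEnv'; note the service-account file
-- path must be absolute.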

View File

@ -0,0 +1,457 @@
{-# LANGUAGE OverloadedStrings #-}
-- | Convert the simple BigQuery AST to an SQL query, ready to be sent
-- to BigQuery's REST API.
module Hasura.Backends.BigQuery.ToQuery
( fromSelect
, fromReselect
, fromExpression
, toBuilderFlat
, toBuilderPretty
, toTextFlat
, toTextPretty
, Printer(..)
, renderBuilderFlat
, renderBuilderPretty
, paramName
) where
import Control.Monad.State.Strict
import Data.Bifunctor
import Data.Foldable
import Data.HashMap.Strict.InsOrd (InsOrdHashMap)
import qualified Data.HashMap.Strict.InsOrd as OMap
import Data.List (intersperse)
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NE
import Data.Maybe
import Data.String
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import Data.Text.Lazy.Builder (Builder)
import qualified Data.Text.Lazy.Builder as LT
import Data.Tuple
import qualified Data.Vector as V
import Hasura.Backends.BigQuery.Types
import Prelude
--------------------------------------------------------------------------------
-- Types
data Printer
= SeqPrinter [Printer]
| SepByPrinter Printer [Printer]
| NewlinePrinter
| UnsafeTextPrinter Text
| IndentPrinter Int Printer
| ValuePrinter Value
deriving (Show, Eq)
instance IsString Printer where
fromString = UnsafeTextPrinter . fromString
(<+>) :: Printer -> Printer -> Printer
(<+>) x y = SeqPrinter [x,y]
--------------------------------------------------------------------------------
-- Printer generators
fromExpression :: Expression -> Printer
fromExpression =
\case
CastExpression e scalarType ->
"CAST(" <+> fromExpression e <+> " AS " <+> fromScalarType scalarType <+> ")"
InExpression e value ->
"(" <+> fromExpression e <+> ") IN UNNEST(" <+> fromValue value <+> ")"
JsonQueryExpression e -> "JSON_QUERY(" <+> fromExpression e <+> ")"
JsonValueExpression e path ->
"JSON_VALUE(" <+> fromExpression e <+> fromPath path <+> ")"
ValueExpression value -> fromValue value
AndExpression xs ->
SepByPrinter
(NewlinePrinter <+> "AND ")
(toList
(fmap
(\x -> "(" <+> fromExpression x <+> ")")
(fromMaybe (pure trueExpression) (NE.nonEmpty xs))))
OrExpression xs ->
SepByPrinter
(NewlinePrinter <+> " OR ")
(toList
(fmap
(\x -> "(" <+> fromExpression x <+> ")")
(fromMaybe (pure falseExpression) (NE.nonEmpty xs))))
NotExpression expression -> "NOT " <+> (fromExpression expression)
ExistsExpression select ->
"EXISTS (" <+> IndentPrinter 9 (fromSelect select) <+> ")"
IsNullExpression expression ->
"(" <+> fromExpression expression <+> ") IS NULL"
IsNotNullExpression expression ->
"(" <+> fromExpression expression <+> ") IS NOT NULL"
ColumnExpression fieldName -> fromFieldName fieldName
EqualExpression x y ->
"(" <+> fromExpression x <+> ") = (" <+> fromExpression y <+> ")"
NotEqualExpression x y ->
"(" <+> fromExpression x <+> ") != (" <+> fromExpression y <+> ")"
ToStringExpression e -> "CONCAT(" <+> fromExpression e <+> ", '')"
SelectExpression s -> "(" <+> IndentPrinter 1 (fromSelect s) <+> ")"
OpExpression op x y ->
"(" <+>
fromExpression x <+>
") " <+> fromOp op <+> " (" <+> fromExpression y <+> ")"
ConditionalProjection expression fieldName ->
"(CASE WHEN(" <+> fromExpression expression <+>
") THEN " <+> fromFieldName fieldName <+>
" ELSE NULL END)"
fromScalarType :: ScalarType -> Printer
fromScalarType =
\case
StringScalarType -> "STRING"
BytesScalarType -> "BYTES"
IntegerScalarType -> "INT64"
FloatScalarType -> "FLOAT64"
BoolScalarType -> "BOOL"
TimestampScalarType -> "TIMESTAMP"
DateScalarType -> "DATE"
TimeScalarType -> "TIME"
DatetimeScalarType -> "DATETIME"
GeographyScalarType -> "GEOGRAPHY"
StructScalarType -> "STRUCT"
DecimalScalarType -> "DECIMAL"
BigDecimalScalarType -> "BIGDECIMAL"
fromOp :: Op -> Printer
fromOp =
\case
LessOp -> "<"
MoreOp -> ">"
MoreOrEqualOp -> ">="
LessOrEqualOp -> "<="
fromPath :: JsonPath -> Printer
fromPath path =
", " <+> string path
where
string = fromExpression .
ValueExpression . StringValue . LT.toStrict . LT.toLazyText . go
go =
\case
RootPath -> "$"
IndexPath r i -> go r <> "[" <> LT.fromString (show i) <> "]"
FieldPath r f -> go r <> "." <> LT.fromText f
fromFieldName :: FieldName -> Printer
fromFieldName (FieldName {..}) =
fromNameText fieldNameEntity <+> "." <+> fromNameText fieldName
fromSelect :: Select -> Printer
fromSelect Select {..} = finalExpression
where
finalExpression = inner
projections =
SepByPrinter
("," <+> NewlinePrinter)
(map fromProjection (toList selectProjections))
inner =
SepByPrinter
NewlinePrinter
[ "SELECT " <+> IndentPrinter 7 projections
, "FROM " <+> IndentPrinter 5 (fromFrom selectFrom)
, SepByPrinter
NewlinePrinter
(map
(\Join {..} ->
SeqPrinter
[ "LEFT OUTER JOIN " <+>
IndentPrinter 16 (fromJoinSource joinSource)
, NewlinePrinter
, "AS " <+> fromJoinAlias joinAlias
, NewlinePrinter
, "ON (" <+>
IndentPrinter
4
(SepByPrinter (", " <+> NewlinePrinter) (map fromOn joinOn)) <+>
")"
])
selectJoins)
, fromWhere selectWhere
, fromOrderBys selectTop selectOffset selectOrderBy
, case selectGroupBy of
[] -> ""
fieldNames -> "GROUP BY " <+> SepByPrinter ", " (map fromFieldName fieldNames)
]
fromOn :: (FieldName, FieldName) -> Printer
fromOn (x,y) = fromFieldName x <+> " = " <+> fromFieldName y
fromJoinSource :: JoinSource -> Printer
fromJoinSource =
\case
JoinSelect select -> "(" <+> fromSelect select <+> ")"
-- We're not using existingJoins at the moment, which was used to
-- avoid re-joining on the same table twice.
-- JoinReselect reselect -> "(" <+> fromReselect reselect <+> ")"
fromReselect :: Reselect -> Printer
fromReselect Reselect {..} =
SepByPrinter
NewlinePrinter
[ "SELECT " <+>
IndentPrinter 7 projections
, fromWhere reselectWhere
]
where
projections =
SepByPrinter
("," <+> NewlinePrinter)
(map fromProjection (toList reselectProjections))
fromOrderBys ::
Top -> Maybe Expression -> Maybe (NonEmpty OrderBy) -> Printer
fromOrderBys NoTop Nothing Nothing = "" -- An ORDER BY is wasteful if not needed.
fromOrderBys top moffset morderBys =
SepByPrinter
NewlinePrinter
[ case morderBys of
Nothing -> ""
Just orderBys ->
SeqPrinter
[ "ORDER BY "
, SepByPrinter
("," <+> NewlinePrinter)
(map fromOrderBy (toList orderBys))
]
, case (top, moffset) of
(NoTop, Nothing) -> ""
(NoTop, Just offset) -> "OFFSET " <+> fromExpression offset
(Top n, Nothing) -> "LIMIT " <+> fromValue (IntegerValue (intToInt64 n))
-- BigQuery's grammar expects LIMIT before OFFSET.
(Top n, Just offset) ->
"LIMIT " <+>
fromValue (IntegerValue (intToInt64 n)) <+>
" OFFSET " <+> fromExpression offset
]
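-- For illustration (hypothetical values): @Top 10@ alone prints "LIMIT 10",
-- an offset alone prints "OFFSET <e>", and the combination prints
-- "LIMIT 10 OFFSET <e>", following BigQuery's LIMIT-before-OFFSET grammar.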
fromOrderBy :: OrderBy -> Printer
fromOrderBy OrderBy {..} =
"(" <+>
fromFieldName orderByFieldName <+>
") " <+>
fromOrder orderByOrder <+>
fromNullsOrder orderByNullsOrder
fromOrder :: Order -> Printer
fromOrder =
\case
AscOrder -> "ASC"
DescOrder -> "DESC"
fromNullsOrder :: NullsOrder -> Printer
fromNullsOrder =
\case
NullsAnyOrder -> ""
NullsFirst -> " NULLS FIRST"
NullsLast -> " NULLS LAST"
fromJoinAlias :: EntityAlias -> Printer
fromJoinAlias EntityAlias {entityAliasText} =
fromNameText entityAliasText
fromProjection :: Projection -> Printer
fromProjection =
\case
ExpressionProjection aliasedExpression ->
fromAliased (fmap fromExpression aliasedExpression)
FieldNameProjection aliasedFieldName ->
fromAliased (fmap fromFieldName aliasedFieldName)
AggregateProjection aliasedAggregate ->
fromAliased (fmap fromAggregate aliasedAggregate)
StarProjection -> "*"
ArrayAggProjection aliasedAgg -> fromAliased (fmap fromArrayAgg aliasedAgg)
EntityProjection aliasedEntity -> fromAliased (fmap fromJoinAlias aliasedEntity)
fromArrayAgg :: ArrayAgg -> Printer
fromArrayAgg ArrayAgg {..} =
SeqPrinter
[ "ARRAY_AGG("
, SepByPrinter
" "
[ "STRUCT(" <+> projections <+> ")"
, fromOrderBys
arrayAggTop
arrayAggOffset
(fmap
(fmap
(\orderBy ->
orderBy
{ orderByNullsOrder = NullsAnyOrder
-- Because BigQuery reports:
-- > NULLS FIRST not supported with descending sort order in aggregate functions
-- And the same error with 'ascending'.
}))
arrayAggOrderBy)
]
, ")"
]
where
projections =
SepByPrinter
("," <+> NewlinePrinter)
(map fromProjection (toList arrayAggProjections))
fromAggregate :: Aggregate -> Printer
fromAggregate =
\case
CountAggregate countable -> "COUNT(" <+> fromCountable countable <+> ")"
OpAggregate text arg ->
UnsafeTextPrinter text <+> "(" <+> fromExpression arg <+> ")"
OpAggregates text args ->
"STRUCT(" <+>
SepByPrinter
", "
(map
(\(alias, arg) ->
UnsafeTextPrinter text <+>
"(" <+> fromExpression arg <+> ") AS " <+> fromNameText alias)
(toList args)) <+>
")"
TextAggregate text -> fromExpression (ValueExpression (StringValue text))
fromCountable :: Countable FieldName -> Printer
fromCountable =
\case
StarCountable -> "*"
NonNullFieldCountable fields ->
SepByPrinter ", " (map fromFieldName (toList fields))
DistinctCountable fields ->
"DISTINCT " <+>
SepByPrinter ", " (map fromFieldName (toList fields))
fromWhere :: Where -> Printer
fromWhere =
\case
Where expressions ->
case filter ((/= trueExpression) . collapse) expressions of
[] -> ""
collapsedExpressions ->
"WHERE " <+>
IndentPrinter 6 (fromExpression (AndExpression collapsedExpressions))
where collapse (AndExpression [x]) = collapse x
collapse (AndExpression []) = trueExpression
collapse (OrExpression [x]) = collapse x
collapse x = x
fromFrom :: From -> Printer
fromFrom =
\case
FromQualifiedTable aliasedQualifiedTableName ->
fromAliased (fmap fromTableName aliasedQualifiedTableName)
fromTableName :: TableName -> Printer
fromTableName TableName {tableName, tableNameSchema} =
fromNameText tableNameSchema <+> "." <+> fromNameText tableName
fromAliased :: Aliased Printer -> Printer
fromAliased Aliased {..} =
aliasedThing <+>
((" AS " <+>) . fromNameText) aliasedAlias
fromNameText :: Text -> Printer
fromNameText t = UnsafeTextPrinter ("`" <> t <> "`")
trueExpression :: Expression
trueExpression = ValueExpression (BoolValue True)
falseExpression :: Expression
falseExpression = ValueExpression (BoolValue False)
fromValue :: Value -> Printer
fromValue = ValuePrinter
--------------------------------------------------------------------------------
-- Quick and easy query printer
toBuilderFlat :: Printer -> Builder
toBuilderFlat = flip evalState mempty . runBuilderFlat
toBuilderPretty :: Printer -> Builder
toBuilderPretty = flip evalState mempty . runBuilderPretty
toTextPretty :: Printer -> Text
toTextPretty = LT.toStrict . LT.toLazyText . toBuilderPretty
toTextFlat :: Printer -> Text
toTextFlat = LT.toStrict . LT.toLazyText . toBuilderFlat
--------------------------------------------------------------------------------
-- Printer ready for consumption
-- | Produces a query with holes, and a mapping from each hole's index to its value
renderBuilderFlat :: Printer -> (Builder, InsOrdHashMap Int Value)
renderBuilderFlat =
second (OMap.fromList . map swap . OMap.toList) . flip runState mempty .
runBuilderFlat
-- | Produces a query with holes, and a mapping from each hole's index to its value
renderBuilderPretty :: Printer -> (Builder, InsOrdHashMap Int Value)
renderBuilderPretty =
second (OMap.fromList . map swap . OMap.toList) . flip runState mempty .
runBuilderPretty
--------------------------------------------------------------------------------
-- Real printer engines
paramName :: Int -> Builder
paramName next = "param" <> fromString (show next)
runBuilderFlat :: Printer -> State (InsOrdHashMap Value Int) Builder
runBuilderFlat = go 0
where
go level =
\case
UnsafeTextPrinter q -> pure (LT.fromText q)
SeqPrinter xs -> fmap (mconcat . filter notEmpty) (mapM (go level) xs)
SepByPrinter x xs -> do
i <- go level x
fmap (mconcat . intersperse i . filter notEmpty) (mapM (go level) xs)
NewlinePrinter -> pure " "
IndentPrinter n p -> go (level + n) p
ValuePrinter (ArrayValue x) | V.null x -> pure "[]"
ValuePrinter v -> do
themap <- get
next <- case OMap.lookup v themap of
Just next -> pure next
Nothing -> do next <- gets OMap.size
modify (OMap.insert v next)
pure next
pure ("@" <> paramName next)
notEmpty = (/= mempty)
runBuilderPretty :: Printer -> State (InsOrdHashMap Value Int) Builder
runBuilderPretty = go 0
where
go level =
\case
UnsafeTextPrinter q -> pure (LT.fromText q)
SeqPrinter xs -> fmap (mconcat . filter notEmpty) (mapM (go level) xs)
SepByPrinter x xs -> do
i <- go level x
fmap (mconcat . intersperse i . filter notEmpty) (mapM (go level) xs)
NewlinePrinter -> pure ("\n" <> indentation level)
IndentPrinter n p -> go (level + n) p
ValuePrinter (ArrayValue x)
| V.null x -> pure "[]"
ValuePrinter v -> do
themap <- get
next <-
case OMap.lookup v themap of
Just next -> pure next
Nothing -> do
next <- gets OMap.size
modify (OMap.insert v next)
pure next
pure ("@" <> paramName next)
indentation n = LT.fromText (T.replicate n " ")
notEmpty = (/= mempty)
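-- As a usage sketch (hypothetical expression):
--
--   renderBuilderFlat
--     (fromExpression
--        (EqualExpression
--           (ColumnExpression (FieldName "id" "t"))
--           (ValueExpression (IntegerValue (intToInt64 1)))))
--
-- yields the text @(`t`.`id`) = (@param0)@ together with the mapping
-- @{0 -> IntegerValue (Int64 "1")}@; repeated values share a single parameter
-- because the state map is keyed by 'Value'.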

View File

@ -0,0 +1,623 @@
{-# LANGUAGE DuplicateRecordFields #-}
-- | Types for BigQuery's standard SQL dialect.
module Hasura.Backends.BigQuery.Types where
import qualified Data.Text.Encoding as T
import Control.DeepSeq
import Data.Aeson (ToJSONKey,FromJSONKey,ToJSON,FromJSON)
import qualified Data.Aeson as J
import Data.ByteString (ByteString)
import qualified Data.ByteString.Base64 as Base64
import qualified Data.ByteString.Lazy as L
import Data.Data
import Data.Hashable
import qualified Data.Text as T
import Data.Text.Extended
import Data.Vector (Vector)
import Data.Vector.Instances ()
import GHC.Generics
import Hasura.Incremental.Internal.Dependency
import Hasura.Prelude
import Hasura.RQL.Types.Error
import qualified Language.GraphQL.Draft.Syntax as G
import Language.Haskell.TH.Syntax
data Select = Select
{ selectTop :: !Top
, selectProjections :: !(NonEmpty Projection)
, selectFrom :: !From
, selectJoins :: ![Join]
, selectWhere :: !Where
, selectFor :: !For
, selectOrderBy :: !(Maybe (NonEmpty OrderBy))
, selectOffset :: !(Maybe Expression)
, selectGroupBy :: [FieldName]
, selectFinalWantedFields :: !(Maybe [Text])
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Select
instance Hashable Select
instance Cacheable Select
instance ToJSON Select
instance NFData Select
data ArrayAgg = ArrayAgg
{ arrayAggProjections :: !(NonEmpty Projection)
, arrayAggOrderBy :: !(Maybe (NonEmpty OrderBy))
, arrayAggTop :: !Top
, arrayAggOffset :: !(Maybe Expression)
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON ArrayAgg
instance Hashable ArrayAgg
instance Cacheable ArrayAgg
instance ToJSON ArrayAgg
instance NFData ArrayAgg
data Reselect = Reselect
{ reselectProjections :: !(NonEmpty Projection)
, reselectFor :: !For
, reselectWhere :: !Where
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Reselect
instance Hashable Reselect
instance Cacheable Reselect
instance ToJSON Reselect
instance NFData Reselect
data OrderBy = OrderBy
{ orderByFieldName :: FieldName
, orderByOrder :: Order
, orderByNullsOrder :: NullsOrder
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON OrderBy
instance Hashable OrderBy
instance Cacheable OrderBy
instance ToJSON OrderBy
instance NFData OrderBy
data Order
= AscOrder
| DescOrder
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Order
instance Hashable Order
instance Cacheable Order
instance ToJSON Order
instance NFData Order
data NullsOrder
= NullsFirst
| NullsLast
| NullsAnyOrder
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON NullsOrder
instance Hashable NullsOrder
instance Cacheable NullsOrder
instance ToJSON NullsOrder
instance NFData NullsOrder
data For
= JsonFor ForJson
| NoFor
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON For
instance Hashable For
instance Cacheable For
instance ToJSON For
instance NFData For
data ForJson = ForJson
{ jsonCardinality :: JsonCardinality
, jsonRoot :: Root
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON ForJson
instance Hashable ForJson
instance Cacheable ForJson
instance ToJSON ForJson
instance NFData ForJson
data Root
= NoRoot
| Root Text
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Root
instance Hashable Root
instance Cacheable Root
instance ToJSON Root
instance NFData Root
data JsonCardinality
= JsonArray
| JsonSingleton
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON JsonCardinality
instance Hashable JsonCardinality
instance Cacheable JsonCardinality
instance ToJSON JsonCardinality
instance NFData JsonCardinality
data Projection
= ExpressionProjection (Aliased Expression)
| FieldNameProjection (Aliased FieldName)
| AggregateProjection (Aliased Aggregate)
| StarProjection
| ArrayAggProjection (Aliased ArrayAgg)
| EntityProjection (Aliased EntityAlias)
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Projection
instance Hashable Projection
instance Cacheable Projection
instance ToJSON Projection
instance NFData Projection
data Join = Join
{ joinSource :: !JoinSource
, joinAlias :: !EntityAlias
, joinOn :: [(FieldName,FieldName)]
, joinProvenance :: !JoinProvenance
, joinFieldName :: !Text
, joinExtractPath :: !(Maybe Text)
, joinRightTable :: !EntityAlias
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Join
instance Hashable Join
instance Cacheable Join
instance ToJSON Join
instance NFData Join
data JoinProvenance
= OrderByJoinProvenance
| ObjectJoinProvenance
| ArrayAggregateJoinProvenance
| ArrayJoinProvenance
| MultiplexProvenance
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON JoinProvenance
instance Hashable JoinProvenance
instance Cacheable JoinProvenance
instance ToJSON JoinProvenance
instance NFData JoinProvenance
data JoinSource
= JoinSelect Select
-- We're not using existingJoins at the moment, which was used to
-- avoid re-joining on the same table twice.
-- | JoinReselect Reselect
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON JoinSource
instance Hashable JoinSource
instance Cacheable JoinSource
instance ToJSON JoinSource
instance NFData JoinSource
newtype Where =
Where [Expression]
deriving (NFData, Eq, Show, Generic, Data, Lift, FromJSON, ToJSON, Hashable, Cacheable)
instance Monoid Where where
mempty = Where mempty
instance Semigroup Where where
(Where x) <> (Where y) = Where (x <> y)
data Top
= NoTop
| Top Int
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Top
instance Hashable Top
instance Cacheable Top
instance ToJSON Top
instance NFData Top
instance Monoid Top where
mempty = NoTop
instance Semigroup Top where
(<>) :: Top -> Top -> Top
(<>) NoTop x = x
(<>) x NoTop = x
(<>) (Top x) (Top y) = Top (min x y)
data Expression
= ValueExpression Value
| InExpression Expression Value
| AndExpression [Expression]
| OrExpression [Expression]
| NotExpression Expression
| ExistsExpression Select
| SelectExpression Select
| IsNullExpression Expression
| IsNotNullExpression Expression
| ColumnExpression FieldName
| EqualExpression Expression Expression
| NotEqualExpression Expression Expression
| JsonQueryExpression Expression
-- ^ This one acts like a "cast to JSON" and makes SQL Server
-- behave like it knows your field is JSON and not double-encode
-- it.
| ToStringExpression Expression
| JsonValueExpression Expression JsonPath
-- ^ This is for getting actual atomic values out of a JSON
-- string.
| OpExpression Op Expression Expression
| CastExpression Expression ScalarType
| ConditionalProjection Expression FieldName
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Expression
instance Hashable Expression
instance Cacheable Expression
instance ToJSON Expression
instance NFData Expression
data JsonPath
= RootPath
| FieldPath JsonPath Text
| IndexPath JsonPath Integer
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON JsonPath
instance Hashable JsonPath
instance Cacheable JsonPath
instance ToJSON JsonPath
instance NFData JsonPath
data Aggregate
= CountAggregate (Countable FieldName)
| OpAggregates !Text (NonEmpty (Text, Expression))
| OpAggregate !Text Expression
| TextAggregate !Text
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Aggregate
instance Hashable Aggregate
instance Cacheable Aggregate
instance ToJSON Aggregate
instance NFData Aggregate
data Countable fieldname
= StarCountable
| NonNullFieldCountable (NonEmpty fieldname)
| DistinctCountable (NonEmpty fieldname)
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON a => FromJSON (Countable a)
instance Hashable a => Hashable (Countable a)
instance Cacheable a => Cacheable (Countable a)
instance ToJSON a => ToJSON (Countable a)
instance NFData a => NFData (Countable a)
data From
= FromQualifiedTable (Aliased TableName)
deriving (Eq, Show, Generic, Data, Lift, Ord)
instance FromJSON From
instance Hashable From
instance Cacheable From
instance ToJSON From
instance NFData From
data OpenJson = OpenJson
{ openJsonExpression :: Expression
, openJsonWith :: NonEmpty JsonFieldSpec
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON OpenJson
instance Hashable OpenJson
instance Cacheable OpenJson
instance ToJSON OpenJson
instance NFData OpenJson
data JsonFieldSpec
= IntField Text
| JsonField Text
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON JsonFieldSpec
instance Hashable JsonFieldSpec
instance Cacheable JsonFieldSpec
instance ToJSON JsonFieldSpec
instance NFData JsonFieldSpec
data Aliased a = Aliased
{ aliasedThing :: !a
, aliasedAlias :: !Text
} deriving (Eq, Show, Generic, Data, Lift, Functor)
instance FromJSON a => FromJSON (Aliased a)
instance Hashable a => Hashable (Aliased a)
instance Cacheable a => Cacheable (Aliased a)
instance ToJSON a => ToJSON (Aliased a)
instance NFData a => NFData (Aliased a)
deriving instance Ord a => Ord (Aliased a)
newtype SchemaName = SchemaName
{ schemaNameParts :: [Text]
} deriving (NFData, Eq, Show, Generic, Data, Lift, FromJSON, ToJSON, Hashable, Cacheable)
data TableName = TableName
{ tableName :: Text
, tableNameSchema :: Text
} deriving (Eq, Show, Generic, Data, Lift, Ord)
instance FromJSON TableName where
parseJSON =
J.withObject
"TableName"
(\o -> TableName <$> o J..: "name" <*> o J..: "dataset")
instance ToJSON TableName where
toJSON TableName{..} = J.object [ "name" J..= tableName, "dataset" J..= tableNameSchema ]
instance Hashable TableName
instance Cacheable TableName
instance ToJSONKey TableName
instance NFData TableName
instance Arbitrary TableName where arbitrary = genericArbitrary
instance ToTxt TableName where toTxt = T.pack . show
data FieldName = FieldName
{ fieldName :: Text
, fieldNameEntity :: !Text
} deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON FieldName
instance Hashable FieldName
instance Cacheable FieldName
instance ToJSON FieldName
instance NFData FieldName
newtype ColumnName = ColumnName
{ columnName :: Text
} deriving (Eq, Ord, Show, Generic, Data, Lift, FromJSON, ToJSON, ToJSONKey, FromJSONKey, Hashable, Cacheable, NFData, ToTxt)
data Comment = DueToPermission | RequestedSingleObject
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Comment
instance Hashable Comment
instance Cacheable Comment
instance ToJSON Comment
instance NFData Comment
instance Arbitrary ColumnName where arbitrary = genericArbitrary
newtype EntityAlias = EntityAlias
{ entityAliasText :: Text
} deriving (NFData, Eq, Show, Generic, Data, Lift, FromJSON, ToJSON, Hashable, Cacheable)
data Op
= LessOp
| LessOrEqualOp
| MoreOp
| MoreOrEqualOp
-- | SIN
-- | SNE
-- | SLIKE
-- | SNLIKE
-- | SILIKE
-- | SNILIKE
-- | SSIMILAR
-- | SNSIMILAR
-- | SGTE
-- | SLTE
-- | SNIN
-- | SContains
-- | SContainedIn
-- | SHasKey
-- | SHasKeysAny
-- | SHasKeysAll
deriving (Eq, Show, Generic, Data, Lift)
instance FromJSON Op
instance Hashable Op
instance Cacheable Op
instance ToJSON Op
instance NFData Op
-- | Source for this representation type:
--
-- https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#google.protobuf.Value
--
-- BigQuery results come in via the REST API as one of these simple types.
--
-- TODO: This omits StructValue -- do we need it?
data Value
= NullValue
| IntegerValue !Int64
-- ^ 64-bit <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#integer_type>
| DecimalValue !Decimal
-- ^ Fixed precision <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#decimal_types>
| BigDecimalValue !BigDecimal
-- ^ Fixed precision <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#decimal_types>
| FloatValue !Float64
-- ^ Floating point <https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#floating_point_types>
| GeographyValue !Geography
| StringValue !Text
| BytesValue !Base64
| BoolValue !Bool
| ArrayValue !(Vector Value)
| TimestampValue !Timestamp
| DateValue !Date
| TimeValue !Time
| DatetimeValue !Datetime
deriving (Show, Eq, Ord, Generic, Data, Lift)
instance FromJSON Value
instance Cacheable Value
instance ToJSON Value
instance NFData Value
instance Hashable Value
-- | BigQuery's conception of a timestamp.
newtype Timestamp = Timestamp Text
deriving (Show, Eq, Ord, Generic, Data, Lift, ToJSON, FromJSON, Cacheable, NFData, Hashable)
-- | BigQuery's conception of a date.
newtype Date = Date Text
deriving (Show, Eq, Ord, Generic, Data, Lift, ToJSON, FromJSON, Cacheable, NFData, Hashable)
-- | BigQuery's conception of a time.
newtype Time = Time Text
deriving (Show, Eq, Ord, Generic, Data, Lift, ToJSON, FromJSON, Cacheable, NFData, Hashable)
-- | BigQuery's conception of a datetime.
newtype Datetime = Datetime Text
deriving (Show, Eq, Ord, Generic, Data, Lift, ToJSON, FromJSON, Cacheable, NFData, Hashable)
-- | BigQuery's conception of an INTEGER/INT64 (they are the same).
newtype Int64 = Int64 Text
deriving (Show, Eq, Ord, Generic, Data, Lift, ToJSON, FromJSON, Cacheable, NFData, Hashable)
intToInt64 :: Int -> Int64
intToInt64 = Int64 . T.pack . show
-- | BigQuery's conception of a fixed precision decimal.
newtype Decimal = Decimal Text
deriving (Show, Eq, Ord, Generic, Data, ToJSON, FromJSON, Cacheable, NFData, Hashable, Lift)
doubleToDecimal :: Double -> Decimal
doubleToDecimal = Decimal . T.decodeUtf8 . L.toStrict . J.encode
-- | BigQuery's conception of a \"big\" fixed precision decimal.
newtype BigDecimal = BigDecimal Text
deriving (Show, Eq, Ord, Generic, Data, ToJSON, FromJSON, Cacheable, NFData, Hashable, Lift)
doubleToBigDecimal :: Double -> BigDecimal
doubleToBigDecimal = BigDecimal . T.decodeUtf8 . L.toStrict . J.encode
-- | BigQuery's conception of a 64-bit floating point number.
newtype Float64 = Float64 Text
deriving (Show, Eq, Ord, Generic, Data, ToJSON, FromJSON, Cacheable, NFData, Hashable, Lift)
doubleToFloat64 :: Double -> Float64
doubleToFloat64 = Float64 . T.decodeUtf8 . L.toStrict . J.encode
-- | A base-64 encoded binary string.
newtype Base64 = Base64
{ unBase64 :: ByteString
} deriving (Show, Eq, Ord, Generic, Data, Lift)
instance FromJSON Base64 where parseJSON = fmap (Base64 . L.toStrict . base64Decode) . J.parseJSON
instance ToJSON Base64 where toJSON = J.toJSON . T.decodeUtf8 . Base64.encode . unBase64
instance Cacheable Base64
instance NFData Base64
instance Hashable Base64
newtype Geography = Geography
{ unGeography :: Text
} deriving (Show, Eq, Ord, Generic, Data, Lift, FromJSON, ToJSON)
instance Cacheable Geography
instance NFData Geography
instance Hashable Geography
data ScalarType
= StringScalarType
| BytesScalarType
| IntegerScalarType
| FloatScalarType
| BoolScalarType
| TimestampScalarType
| DateScalarType
| TimeScalarType
| DatetimeScalarType
| GeographyScalarType
| DecimalScalarType
| BigDecimalScalarType
| StructScalarType
deriving (Show, Eq, Ord, Generic, Data, Lift)
instance FromJSON ScalarType
instance Cacheable ScalarType
instance ToJSON ScalarType
instance ToJSONKey ScalarType
instance NFData ScalarType
instance Hashable ScalarType
instance ToTxt ScalarType where toTxt = T.pack . show
--------------------------------------------------------------------------------
-- Unified table metadata
data UnifiedMetadata = UnifiedMetadata
{ tables :: ![UnifiedTableMetadata]
} deriving (Eq, Show)
data UnifiedTableMetadata = UnifiedTableMetadata
{ table :: !UnifiedTableName
, object_relationships :: ![UnifiedObjectRelationship]
, array_relationships :: ![UnifiedArrayRelationship]
, columns :: ![UnifiedColumn]
} deriving (Eq, Show)
data UnifiedColumn = UnifiedColumn
{ name :: !Text
, type' :: !ScalarType
} deriving (Eq, Show)
data UnifiedTableName = UnifiedTableName
{ schema :: !Text
, name :: !Text
} deriving (Eq, Show)
data UnifiedObjectRelationship = UnifiedObjectRelationship
{ using :: !UnifiedUsing
, name :: !Text
} deriving (Eq, Show)
data UnifiedArrayRelationship = UnifiedArrayRelationship
{ using :: !UnifiedUsing
, name :: !Text
} deriving (Eq, Show)
data UnifiedUsing = UnifiedUsing
{ foreign_key_constraint_on :: !UnifiedOn
} deriving (Eq, Show)
data UnifiedOn = UnifiedOn
{ table :: !UnifiedTableName
, column :: !Text
} deriving (Eq, Show)
-- Copied from feature/mssql
newtype FunctionName = FunctionName Text -- TODO: Improve this type when SQL function support is added
deriving (FromJSON, ToJSON, ToJSONKey, ToTxt, Arbitrary, Show, Eq, Ord, Hashable, Cacheable, NFData)
--------------------------------------------------------------------------------
-- Backend-related stuff
parseScalarValue :: ScalarType -> J.Value -> Either QErr Value
parseScalarValue scalarType jValue = case scalarType of
StringScalarType -> StringValue <$> parseJValue jValue
BytesScalarType -> StringValue <$> parseJValue jValue
IntegerScalarType -> IntegerValue <$> parseJValue jValue
FloatScalarType -> FloatValue <$> parseJValue jValue
BoolScalarType -> BoolValue <$> parseJValue jValue
DecimalScalarType -> DecimalValue <$> parseJValue jValue
BigDecimalScalarType -> BigDecimalValue <$> parseJValue jValue
_ -> Left (internalError (T.pack ("Unsupported scalar type: " <> show scalarType <> ": " <> show jValue)))
-- TODO: These types:
-- TimestampScalarType -> TimestampValue <$> parseJValue jValue
-- DateScalarType -> DateValue <$> parseJValue jValue
-- TimeScalarType -> TimeValue <$> parseJValue jValue
-- DatetimeScalarType -> DatetimeValue <$> parseJValue jValue
-- GeographyScalarType -> GeographyValue <$> parseJValue jValue
-- RecordScalarType -> RecordValue <$> parseJValue jValue
-- StructScalarType -> StructValue <$> parseJValue jValue
where
parseJValue :: (J.FromJSON a) => J.Value -> Either QErr a
parseJValue = runAesonParser J.parseJSON
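-- For example (hypothetical input), an INT64 column value arrives from the
-- API as a JSON string, so:
--
--   parseScalarValue IntegerScalarType (J.String "42")
--     == Right (IntegerValue (Int64 "42"))
--
-- assuming the newtype-derived 'FromJSON' for 'Int64', which reads the value
-- from a JSON string.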
isComparableType, isNumType :: ScalarType -> Bool
-- TODO: What does this mean?
isComparableType = \case
BoolScalarType -> True
BytesScalarType -> True
_ -> False
isNumType =
\case
StringScalarType -> False
BytesScalarType -> False
IntegerScalarType -> True
FloatScalarType -> True
BoolScalarType -> False
TimestampScalarType -> False
DateScalarType -> False
TimeScalarType -> False
DatetimeScalarType -> False
GeographyScalarType -> False
DecimalScalarType -> True
BigDecimalScalarType -> True
StructScalarType -> False
getGQLTableName :: TableName -> Either QErr G.Name
getGQLTableName (TableName table schema) = do
let textName = schema <> "_" <> table
onNothing (G.mkName textName) $ throw400 ValidationFailed $
"cannot include " <> textName <> " in the GraphQL schema because it is not a valid GraphQL identifier"

View File

@ -4,6 +4,7 @@ module Hasura.Backends.MSSQL.Instances.Schema () where
import Hasura.Prelude
import qualified Data.HashMap.Strict as Map
import qualified Data.List.NonEmpty as NE
import qualified Database.ODBC.SQLServer as ODBC

View File

@ -4,3 +4,4 @@ module Hasura.GraphQL.Execute.Instances (module B) where
import Hasura.Backends.MSSQL.Instances.Execute as B ()
import Hasura.Backends.Postgres.Instances.Execute as B ()
import Hasura.Backends.BigQuery.Instances.Execute as B ()

View File

@ -15,6 +15,7 @@ import qualified Language.GraphQL.Draft.Syntax as G
import Control.Monad.Trans.Control (MonadBaseControl)
import Data.Text.Extended
import qualified Data.Text as T
import qualified Hasura.Backends.Postgres.Execute.RemoteJoin as RR
import qualified Hasura.Backends.Postgres.SQL.DML as S
@ -27,6 +28,8 @@ import qualified Hasura.GraphQL.Execute.Query as E
import qualified Hasura.GraphQL.Execute.RemoteJoin as RR
import qualified Hasura.GraphQL.Transport.HTTP.Protocol as GH
import qualified Hasura.SQL.AnyBackend as AB
import qualified Hasura.Backends.BigQuery.DataLoader.Plan as BigQuery
import qualified Hasura.Backends.BigQuery.Plan as BigQuery
import Hasura.Backends.Postgres.SQL.Value
import Hasura.Backends.Postgres.Translate.Column (toTxtValue)
@ -95,7 +98,7 @@ explainQueryField userInfo fieldName rootField = do
RFRemote _ -> throw400 InvalidParams "only hasura queries can be explained"
RFAction _ -> throw400 InvalidParams "query actions cannot be explained"
RFRaw _ -> pure $ Just $ FieldPlan fieldName Nothing Nothing
RFDB _ exists -> runMaybeT $ do
RFDB _ exists -> dispatch [do
-- TEMPORARY!!!
-- We don't handle non-Postgres backends yet: for now, we filter root fields to only keep those
-- that are targeting postgres, and we *silently* discard all the others. This is fine for now, as
@ -120,6 +123,23 @@ explainQueryField userInfo fieldName rootField = do
liftTx $ map runIdentity <$>
Q.listQE dmlTxErrorHandler (Q.fromText withExplain) () True
pure $ FieldPlan fieldName (Just textSQL) $ Just planLines
,do
-- BigQuery case
SourceConfigWith _ (QDBR bqQDB) <-
hoistMaybe $ AB.unpackAnyBackend exists
lift $ do
actionsForest <- BigQuery.planToForest userInfo bqQDB
pure $
FieldPlan
fieldName
(Just ("--\n" <> BigQuery.drawActionsForestSQL actionsForest))
(Just ("": T.lines (BigQuery.drawActionsForest actionsForest)))]
where dispatch [] = pure Nothing
dispatch (x:xs) = do
mv <- runMaybeT x
case mv of
Nothing -> dispatch xs
Just v -> pure (Just v)
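-- 'dispatch' tries each backend case above in order and returns the
-- first plan that matches; if none does, the field is silently skipped.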
-- NOTE: This function has a 'MonadTrace' constraint in master, but we don't need it
-- here. We should evaluate if we need it here.

View File

@ -4,3 +4,4 @@ module Hasura.GraphQL.Schema.Instances (module B) where
import Hasura.Backends.MSSQL.Instances.Schema as B ()
import Hasura.Backends.Postgres.Instances.Schema as B ()
import Hasura.Backends.BigQuery.Instances.Schema as B ()

View File

@ -4,3 +4,4 @@ module Hasura.GraphQL.Transport.Instances (module B) where
import Hasura.Backends.MSSQL.Instances.Transport as B ()
import Hasura.Backends.Postgres.Instances.Transport as B ()
import Hasura.Backends.BigQuery.Instances.Transport as B ()

View File

@ -73,6 +73,7 @@ import Data.Text.Extended
import Data.Text.NonEmpty
import Data.URL.Template
import qualified Hasura.Backends.Postgres.SQL.Types as PG
import Hasura.EncJSON

View File

@ -71,6 +71,7 @@ data Code
| PostgresMaxConnectionsError
| MSSQLError
| DatabaseConnectionTimeout
| BigQueryError
| NotSupported
| DependencyError
| InvalidHeaders
@ -158,6 +159,7 @@ instance Show Code where
InvalidCustomTypes -> "invalid-custom-types"
MethodNotAllowed -> "method-not-allowed"
Conflict -> "conflict"
BigQueryError -> "bigquery-error"
ActionWebhookCode t -> T.unpack t
CustomCode t -> T.unpack t

View File

@ -4,3 +4,4 @@ module Hasura.RQL.Types.Instances (module B) where
import Hasura.Backends.MSSQL.Instances.Types as B ()
import Hasura.Backends.Postgres.Instances.Types as B ()
import Hasura.Backends.BigQuery.Instances.Types as B ()

View File

@ -4,3 +4,4 @@ module Hasura.RQL.Types.Metadata.Instances (module B) where
import Hasura.Backends.MSSQL.Instances.Metadata as B ()
import Hasura.Backends.Postgres.Instances.Metadata as B ()
import Hasura.Backends.BigQuery.Instances.Metadata as B ()

View File

@ -16,12 +16,14 @@ import Data.Text.Extended
data BackendType
= Postgres
| MSSQL
| BigQuery
deriving (Eq, Ord, Bounded, Enum)
-- | The name of the backend, as we expect it to appear in our metadata and API.
instance ToTxt BackendType where
toTxt Postgres = "postgres"
toTxt MSSQL = "mssql"
toTxt BigQuery = "bigquery"
-- | The FromJSON instance uses this lookup mechanism to avoid having
-- to duplicate and hardcode the backend string.

View File

@ -115,6 +115,28 @@ data RQLMetadataV1
| RMMssqlDropDeletePermission !(DropPerm 'MSSQL (DelPerm 'MSSQL))
| RMMssqlSetPermissionComment !(SetPermComment 'MSSQL)
-- BigQuery sources
| RMBigqueryAddSource !(AddSource 'BigQuery)
| RMBigqueryDropSource !DropSource
| RMBigqueryTrackTable !(TrackTableV2 'BigQuery)
| RMBigqueryUntrackTable !(UntrackTable 'BigQuery)
| RMBigqueryCreateObjectRelationship !(CreateObjRel 'BigQuery)
| RMBigqueryCreateArrayRelationship !(CreateArrRel 'BigQuery)
| RMBigqueryDropRelationship !(DropRel 'BigQuery)
| RMBigquerySetRelationshipComment !(SetRelComment 'BigQuery)
| RMBigqueryRenameRelationship !(RenameRel 'BigQuery)
| RMBigqueryCreateInsertPermission !(CreateInsPerm 'BigQuery)
| RMBigqueryCreateSelectPermission !(CreateSelPerm 'BigQuery)
| RMBigqueryCreateUpdatePermission !(CreateUpdPerm 'BigQuery)
| RMBigqueryCreateDeletePermission !(CreateDelPerm 'BigQuery)
| RMBigqueryDropInsertPermission !(DropPerm 'BigQuery (InsPerm 'BigQuery))
| RMBigqueryDropSelectPermission !(DropPerm 'BigQuery (SelPerm 'BigQuery))
| RMBigqueryDropUpdatePermission !(DropPerm 'BigQuery (UpdPerm 'BigQuery))
| RMBigqueryDropDeletePermission !(DropPerm 'BigQuery (DelPerm 'BigQuery))
| RMBigquerySetPermissionComment !(SetPermComment 'BigQuery)
-- Inconsistent metadata
| RMGetInconsistentMetadata !GetInconsistentMetadata
| RMDropInconsistentMetadata !DropInconsistentMetadata
@ -390,6 +412,28 @@ runMetadataQueryV1M env currentResourceVersion = \case
RMPgRedeliverEvent q -> runRedeliverEvent q
RMPgInvokeEventTrigger q -> runInvokeEventTrigger q
RMBigqueryAddSource q -> runAddSource q
RMBigqueryDropSource q -> runDropSource q
RMBigqueryTrackTable q -> runTrackTableV2Q q
RMBigqueryUntrackTable q -> runUntrackTableQ q
RMBigqueryCreateObjectRelationship q -> runCreateRelationship ObjRel q
RMBigqueryCreateArrayRelationship q -> runCreateRelationship ArrRel q
RMBigqueryDropRelationship q -> runDropRel q
RMBigquerySetRelationshipComment q -> runSetRelComment q
RMBigqueryRenameRelationship q -> runRenameRel q
RMBigqueryCreateInsertPermission q -> runCreatePerm q
RMBigqueryCreateSelectPermission q -> runCreatePerm q
RMBigqueryCreateUpdatePermission q -> runCreatePerm q
RMBigqueryCreateDeletePermission q -> runCreatePerm q
RMBigqueryDropInsertPermission q -> runDropPerm q
RMBigqueryDropSelectPermission q -> runDropPerm q
RMBigqueryDropUpdatePermission q -> runDropPerm q
RMBigqueryDropDeletePermission q -> runDropPerm q
RMBigquerySetPermissionComment q -> runSetPermComment q
RMMssqlAddSource q -> runAddSource q
RMMssqlDropSource q -> runDropSource q
RMMssqlTrackTable q -> runTrackTableV2Q q

View File

@ -27,6 +27,7 @@ import Hasura.Server.Version (HasVersion)
import Hasura.Session
import qualified Hasura.Backends.MSSQL.DDL.RunSQL as MSSQL
import qualified Hasura.Backends.BigQuery.DDL.RunSQL as BigQuery
import qualified Hasura.Tracing as Tracing
data RQLQuery
@ -37,6 +38,8 @@ data RQLQuery
| RQCount !CountQuery
| RQRunSql !RunSQL
| RQMssqlRunSql !MSSQL.MSSQLRunSQL
| RQBigqueryRunSql !BigQuery.BigQueryRunSQL
| RQBigqueryDatabaseInspection !BigQuery.BigQueryRunSQL
| RQBulk ![RQLQuery]
deriving (Show)
@ -120,4 +123,6 @@ runQueryM env = \case
RQCount q -> runCount q
RQRunSql q -> runRunSQL q
RQMssqlRunSql q -> MSSQL.runSQL q
RQBigqueryRunSql q -> BigQuery.runSQL q
RQBigqueryDatabaseInspection q -> BigQuery.runDatabaseInspection q
RQBulk l -> encJFromList <$> indexedMapM (runQueryM env) l