graphql-engine/server/src-lib/Hasura/Backends/BigQuery/DDL/RunSQL.hs

{-# LANGUAGE TemplateHaskell #-}

-- |
-- Working example:
--
-- \$ curl -XPOST http://localhost:8080/v2/query -d @- <<EOF
-- {
--   "type":"bigquery_run_sql",
--   "args": {
--     "sql":"select 3 * 4 as foo, \"Hello, World!\" as bar",
--     "source":"chinook"
--   }
-- }
-- EOF
-- {"result_type":"TuplesOk","result":[["foo","bar"],["12","Hello, World!"]]}
module Hasura.Backends.BigQuery.DDL.RunSQL
  ( runSQL,
    runDatabaseInspection,
    BigQueryRunSQL,
  )
where

import Data.Aeson qualified as J
import Data.Aeson.TH (deriveJSON)
import Data.Aeson.Text (encodeToLazyText)
import Data.HashMap.Strict.InsOrd qualified as OMap
import Data.Text qualified as T
import Data.Text.Lazy qualified as LT
import Data.Vector qualified as V
import Hasura.Backends.BigQuery.Execute qualified as Execute
import Hasura.Backends.BigQuery.Source (BigQuerySourceConfig (..))
import Hasura.Base.Error
import Hasura.EncJSON
import Hasura.Prelude
import Hasura.RQL.DDL.Schema (RunSQLRes (..))
import Hasura.RQL.Types (CacheRWM, MetadataM, SourceName, askSourceConfig)
import Hasura.SQL.Backend
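
-- | Arguments of a @bigquery_run_sql@ request on the @v2/query@ endpoint:
-- the raw SQL text to run and the name of the source to run it against.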
data BigQueryRunSQL = BigQueryRunSQL
  { _mrsSql :: Text,
    _mrsSource :: !SourceName
  }
  deriving (Show, Eq)

$(deriveJSON hasuraJSON ''BigQueryRunSQL)
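
-- | Run the given SQL against the source and return the result as a header
-- row followed by the data rows, as in the example at the top of this module.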
runSQL ::
  (MonadIO m, CacheRWM m, MonadError QErr m, MetadataM m) =>
  BigQueryRunSQL ->
  m EncJSON
runSQL = runSQL_ recordSetAsHeaderAndRows

-- | Inspect the source's schema. The SQL query in the request is ignored;
-- instead, an INFORMATION_SCHEMA query is run against every configured dataset.
runDatabaseInspection ::
  (MonadIO m, CacheRWM m, MonadError QErr m, MetadataM m) =>
  BigQueryRunSQL ->
  m EncJSON
runDatabaseInspection (BigQueryRunSQL _query source) = do
  BigQuerySourceConfig {_scDatasets = dataSets} <- askSourceConfig @'BigQuery source
  let queries =
        [ "SELECT *, ARRAY(SELECT as STRUCT * from "
            <> dataSet
            <> ".INFORMATION_SCHEMA.COLUMNS WHERE table_name = t.table_name) as columns from "
            <> dataSet
            <> ".INFORMATION_SCHEMA.TABLES as t"
          | dataSet <- dataSets
        ]
      query' = T.intercalate " UNION ALL " queries
  runSQL_ recordSetAsSchema (BigQueryRunSQL query' source)
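
-- | Shared worker for 'runSQL' and 'runDatabaseInspection': stream the query
-- through BigQuery, render the resulting record set with the supplied
-- function, and turn any execution problem into a 400 error that carries the
-- upstream exception as internal detail.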
runSQL_ ::
  (MonadIO m, CacheRWM m, MonadError QErr m, MetadataM m) =>
  (Execute.RecordSet -> J.Value) ->
  BigQueryRunSQL ->
  m EncJSON
runSQL_ f (BigQueryRunSQL query source) = do
  sourceConfig <- askSourceConfig @'BigQuery source
  result <-
    Execute.streamBigQuery
      (_scConnection sourceConfig)
      Execute.BigQuery {query = LT.fromStrict query, parameters = mempty}
  case result of
    Left executeProblem -> do
      let errorMessage = Execute.executeProblemMessage executeProblem
      throwError (err400 BigQueryError errorMessage) {qeInternal = Just $ ExtraInternal $ J.toJSON executeProblem}
    Right recordSet ->
      pure
        ( encJFromJValue
            (RunSQLRes "TuplesOk" (f recordSet))
        )
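
-- | Render a record set as a JSON array whose first element is the list of
-- column names and whose remaining elements are the rows' values.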
recordSetAsHeaderAndRows :: Execute.RecordSet -> J.Value
recordSetAsHeaderAndRows Execute.RecordSet {rows} = J.toJSON (thead : tbody)
  where
    thead =
      case rows V.!? 0 of
        Nothing -> []
        Just row ->
          map (J.toJSON . (coerce :: Execute.FieldNameText -> Text)) (OMap.keys row)
    tbody :: [[J.Value]]
    tbody = map (map J.toJSON . OMap.elems) (toList rows)
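
-- | Render the record set produced by 'runDatabaseInspection': the nested
-- @columns@ array of each row is re-encoded as a JSON text value before the
-- usual header-and-rows rendering.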
recordSetAsSchema :: Execute.RecordSet -> J.Value
recordSetAsSchema rs@(Execute.RecordSet {rows}) =
  recordSetAsHeaderAndRows $
    rs
      { Execute.rows =
          OMap.adjust
            (Execute.TextOutputValue . LT.toStrict . encodeToLazyText . J.toJSON)
            (Execute.FieldNameText "columns")
            <$> rows
      }