plugins as opendd objects (#889)

### What

This PR introduces a new OpenDD object of kind `LifecyclePluginHook`. Here is an example:
```json
{
  "kind": "LifecyclePluginHook",
  "version": "v1",
  "definition": {
    "pre": "parse",
    "name": "test",
    "url": "http://localhost:8787",
    "config": {
      "request": {
        "headers": {
          "additional": {
            "hasura-m-auth": {
              "value": "zZkhKqFjqXR4g5MZCsJUZCnhCcoPyZ"
            }
          }
        },
        "session": {},
        "rawRequest": {
          "query": {},
          "variables": {}
        }
      }
    }
  }
}
```

The plugin configurations (only pre-parse plugins for now) are resolved from the metadata, stored in the
engine state, and used wherever they are required.
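
To make that flow concrete, here is a minimal sketch (not part of the diff) of what the new `plugins` resolve stage does with these objects; it assumes the `open-dds` crate from this PR is available as a dependency:

```rust
use open_dds::plugins::{LifecyclePluginHookPreParse, LifecyclePluginHookV1};

/// Collect only the pre-parse hooks out of a list of v1 plugin hook definitions.
/// This mirrors the new `plugins::resolve` stage in `metadata-resolve`, which walks
/// the metadata accessor and fills `Metadata.pre_parse_plugins`.
fn collect_pre_parse_plugins(
    hooks: &[LifecyclePluginHookV1],
) -> Vec<LifecyclePluginHookPreParse> {
    hooks
        .iter()
        .map(|hook| match hook {
            // `LifecyclePluginHookV1` currently has a single variant; future hook
            // stages would add further arms here.
            LifecyclePluginHookV1::Parse(pre_parse) => pre_parse.clone(),
        })
        .collect()
}
```

The engine then clones the resolved list into its state (`EngineState.pre_parse_plugins`) and hands it to `pre_execution_plugins_handler` on each GraphQL request.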

### How

We added the `LifecyclePluginHook` object to the `open-dds` crate and a new `plugins` resolve stage in `metadata-resolve` that collects the pre-parse hooks into `Metadata.pre_parse_plugins`. The engine now reads the resolved pre-parse plugins from the metadata instead of a separate config file: the `pre_execution_plugins_path` CLI option and the standalone `PrePluginConfig` format are removed, and the `pre-execution-plugin` crate consumes `LifecyclePluginHookPreParse` directly.
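
The new object round-trips through serde. The sketch below is modelled on the round-trip test added to `open-dds` (see the diff) and assumes that crate plus `serde_json` as dependencies; as in that test, the envelope's `kind` key is tolerated by the versioned enum:

```rust
use open_dds::plugins::{LifecyclePluginHook, LifecyclePluginHookV1};

fn main() -> Result<(), serde_json::Error> {
    // The example object from the PR description.
    let raw = serde_json::json!({
        "kind": "LifecyclePluginHook",
        "version": "v1",
        "definition": {
            "pre": "parse",
            "name": "test",
            "url": "http://localhost:8787",
            "config": {
                "request": {
                    "headers": {
                        "additional": {
                            "hasura-m-auth": { "value": "zZkhKqFjqXR4g5MZCsJUZCnhCcoPyZ" }
                        }
                    },
                    "session": {},
                    "rawRequest": { "query": {}, "variables": {} }
                }
            }
        }
    });
    let hook: LifecyclePluginHook = serde_json::from_value(raw)?;
    // Both enums have a single variant today, so the pattern is irrefutable.
    let LifecyclePluginHook::V1(LifecyclePluginHookV1::Parse(pre_parse)) = hook;
    println!("pre-parse plugin `{}` -> {}", pre_parse.name, pre_parse.url);
    Ok(())
}
```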

V3_GIT_ORIGIN_REV_ID: aa02315362e5fc9a36b63ead48909e1baa92779f
paritosh-08 2024-08-08 14:30:51 +05:30 committed by hasura-bot
parent aa287dbd44
commit 7e3e8264d8
42 changed files with 547 additions and 157 deletions

v3/Cargo.lock (generated)
View File

@ -3615,8 +3615,8 @@ dependencies = [
"axum",
"hasura-authn-core",
"lang-graphql",
"open-dds",
"reqwest",
"schemars",
"serde",
"serde_json",
"thiserror",

View File

@ -16,6 +16,8 @@ use axum::{
};
use base64::engine::Engine;
use clap::Parser;
use open_dds::plugins::LifecyclePluginHookPreParse;
use pre_execution_plugin::execute::pre_execution_plugins_handler;
use reqwest::header::CONTENT_TYPE;
use serde::Serialize;
use tower_http::cors::CorsLayer;
@ -24,7 +26,6 @@ use tower_http::trace::TraceLayer;
use engine::{
authentication::{resolve_auth_config, AuthConfig, AuthModeConfig},
internal_flags::{resolve_unstable_features, UnstableFeature},
plugins::read_pre_execution_plugins_config,
VERSION,
};
use execute::HttpContext;
@ -34,9 +35,6 @@ use hasura_authn_jwt::jwt;
use hasura_authn_noauth as noauth;
use hasura_authn_webhook::webhook;
use lang_graphql as gql;
use pre_execution_plugin::{
configuration::PrePluginConfig, execute::pre_execution_plugins_handler,
};
use schema::GDS;
use tracing_util::{
add_event_on_active_span, set_attribute_on_active_span, set_status_on_current_span,
@ -99,9 +97,6 @@ struct ServerOptions {
value_delimiter = ','
)]
unstable_features: Vec<UnstableFeature>,
/// The configuration file used for authentication.
#[arg(long, value_name = "PATH", env = "pre_execution_plugins_path")]
pre_execution_plugins_path: Option<PathBuf>,
/// Whether internal errors should be shown or censored.
/// It is recommended to only show errors while developing since internal errors may contain
@ -119,8 +114,8 @@ struct EngineState {
http_context: HttpContext,
schema: gql::schema::Schema<GDS>,
auth_config: AuthConfig,
pre_execution_plugins_config: Vec<PrePluginConfig>,
sql_context: Arc<sql::catalog::Catalog>,
pre_parse_plugins: Vec<LifecyclePluginHookPreParse>,
}
#[tokio::main]
@ -199,8 +194,6 @@ enum StartupError {
ReadAuth(anyhow::Error),
#[error("failed to build engine state - {0}")]
ReadSchema(anyhow::Error),
#[error("could not read the pre-execution plugins config - {0}")]
ReadPrePlugin(anyhow::Error),
}
impl TraceableError for StartupError {
@ -367,7 +360,6 @@ async fn start_engine(server: &ServerOptions) -> Result<(), StartupError> {
expose_internal_errors,
&server.authn_config_path,
&server.metadata_path,
&server.pre_execution_plugins_path,
metadata_resolve_configuration,
)
.map_err(StartupError::ReadSchema)?;
@ -676,7 +668,7 @@ where
B::Error: Display,
{
let (request, response) = pre_execution_plugins_handler(
&engine_state.pre_execution_plugins_config,
&engine_state.pre_parse_plugins,
&engine_state.http_context.client,
session,
request,
@ -749,7 +741,6 @@ fn build_state(
expose_internal_errors: execute::ExposeInternalErrors,
authn_config_path: &PathBuf,
metadata_path: &PathBuf,
pre_execution_plugins_path: &Option<PathBuf>,
metadata_resolve_configuration: metadata_resolve::configuration::Configuration,
) -> Result<Arc<EngineState>, anyhow::Error> {
// Auth Config
@ -757,11 +748,6 @@ fn build_state(
let (auth_config, auth_warnings) =
resolve_auth_config(&raw_auth_config).map_err(StartupError::ReadAuth)?;
// Plugins
let pre_execution_plugins_config =
read_pre_execution_plugins_config(pre_execution_plugins_path)
.map_err(StartupError::ReadPrePlugin)?;
// Metadata
let raw_metadata = std::fs::read_to_string(metadata_path)?;
let metadata = open_dds::Metadata::from_json_str(&raw_metadata)?;
@ -776,6 +762,7 @@ fn build_state(
client: reqwest::Client::new(),
ndc_response_size_limit: None,
};
let pre_parse_plugins = resolved_metadata.pre_parse_plugins.clone();
let sql_context = sql::catalog::Catalog::from_metadata(resolved_metadata.clone());
let schema = schema::GDS {
metadata: resolved_metadata,
@ -786,8 +773,8 @@ fn build_state(
http_context,
schema,
auth_config,
pre_execution_plugins_config,
sql_context: sql_context.into(),
pre_parse_plugins,
});
Ok(state)
}

View File

@ -1,7 +1,6 @@
pub mod authentication;
pub mod build;
pub mod internal_flags;
pub mod plugins;
// This is set by the build.rs script.
/// The version of the v3-engine release.

View File

@ -1,29 +0,0 @@
use pre_execution_plugin::configuration::PrePluginConfig;
use serde::Deserialize;
#[derive(Debug, Clone, Deserialize)]
#[serde(tag = "version", content = "definition")]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
/// Definition of the Pre-execution Plugin configuration used by the API server.
enum PreExecutionPluginConfiguration {
V1(PrePluginConfig),
}
pub fn read_pre_execution_plugins_config(
path: &Option<std::path::PathBuf>,
) -> Result<Vec<PrePluginConfig>, anyhow::Error> {
let pre_plugins: Vec<PreExecutionPluginConfiguration> = match path {
Some(path) => {
let raw_pre_execution_plugins_config = std::fs::read_to_string(path)?;
Ok::<_, anyhow::Error>(serde_json::from_str(&raw_pre_execution_plugins_config)?)
}
None => Ok(vec![]),
}?;
Ok(pre_plugins
.into_iter()
.map(|p| match p {
PreExecutionPluginConfiguration::V1(config) => config,
})
.collect())
}

View File

@ -13,6 +13,7 @@ pub mod models_graphql;
pub mod object_boolean_expressions;
pub mod object_types;
pub mod order_by_expressions;
pub mod plugins;
pub mod relationships;
pub mod relay;
pub mod roles;
@ -253,6 +254,8 @@ pub fn resolve(
&commands_with_permissions,
);
let pre_parse_plugins = plugins::resolve(&metadata_accessor);
Ok((
Metadata {
scalar_types,
@ -265,6 +268,7 @@ pub fn resolve(
aggregate_expressions,
graphql_config: graphql_config.global,
roles,
pre_parse_plugins,
},
all_warnings,
))

View File

@ -0,0 +1,16 @@
use open_dds::plugins::LifecyclePluginHookPreParse;
use open_dds::plugins::LifecyclePluginHookV1;
pub fn resolve(
metadata_accessor: &open_dds::accessor::MetadataAccessor,
) -> Vec<LifecyclePluginHookPreParse> {
let mut pre_parse_plugins = Vec::new();
for plugin in &metadata_accessor.plugins {
match &plugin.object {
LifecyclePluginHookV1::Parse(plugin) => pre_parse_plugins.push(plugin.clone()),
}
}
pre_parse_plugins
}

View File

@ -9,6 +9,8 @@ use open_dds::{
types::CustomTypeName,
};
use open_dds::plugins::LifecyclePluginHookPreParse;
use crate::types::subgraph::Qualified;
use crate::stages::{
@ -33,5 +35,6 @@ pub struct Metadata {
pub aggregate_expressions:
BTreeMap<Qualified<AggregateExpressionName>, aggregates::AggregateExpression>,
pub graphql_config: graphql_config::GlobalGraphqlConfig,
pub pre_parse_plugins: Vec<LifecyclePluginHookPreParse>,
pub roles: Vec<Role>,
}

View File

@ -2971,6 +2971,7 @@ input_file: crates/metadata-resolve/tests/passing/aggregate_expressions/relation
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -1761,6 +1761,7 @@ input_file: crates/metadata-resolve/tests/passing/aggregate_expressions/root_fie
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -703,6 +703,7 @@ input_file: crates/metadata-resolve/tests/passing/boolean_expression_type/basic/
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [
Role(
"admin",

View File

@ -1830,6 +1830,7 @@ input_file: crates/metadata-resolve/tests/passing/boolean_expression_type/nested
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [
Role(
"admin",

View File

@ -450,6 +450,7 @@ input_file: crates/metadata-resolve/tests/passing/boolean_expression_type/no_gra
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [
Role(
"admin",

View File

@ -1323,6 +1323,7 @@ input_file: crates/metadata-resolve/tests/passing/boolean_expression_type/range/
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [
Role(
"admin",

View File

@ -502,6 +502,7 @@ input_file: crates/metadata-resolve/tests/passing/boolean_expression_type/two_da
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [
Role(
"admin",

View File

@ -272,6 +272,7 @@ input_file: crates/metadata-resolve/tests/passing/commands/functions/all_args_ar
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -253,6 +253,7 @@ input_file: crates/metadata-resolve/tests/passing/commands/functions/issue_when_
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[

View File

@ -272,6 +272,7 @@ input_file: crates/metadata-resolve/tests/passing/commands/procedures/all_args_a
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -253,6 +253,7 @@ input_file: crates/metadata-resolve/tests/passing/commands/procedures/issue_when
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[

View File

@ -46,6 +46,7 @@ input_file: crates/metadata-resolve/tests/passing/data_connector_link/invalid_nd
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[

View File

@ -731,6 +731,7 @@ input_file: crates/metadata-resolve/tests/passing/missing_subgraph_when_ignoring
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [
Role(
"admin",

View File

@ -592,6 +592,7 @@ input_file: crates/metadata-resolve/tests/passing/models/all_args_are_set_includ
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -573,6 +573,7 @@ input_file: crates/metadata-resolve/tests/passing/models/issue_when_not_all_argu
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[

View File

@ -466,6 +466,7 @@ input_file: crates/metadata-resolve/tests/passing/order_by_expressions/model_v1_
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -358,6 +358,7 @@ input_file: crates/metadata-resolve/tests/passing/order_by_expressions/model_v2_
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -466,6 +466,7 @@ input_file: crates/metadata-resolve/tests/passing/order_by_expressions/model_v2_
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -667,6 +667,7 @@ input_file: crates/metadata-resolve/tests/passing/order_by_expressions/nested/me
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -46,6 +46,7 @@ input_file: crates/metadata-resolve/tests/passing/simple/metadata.json
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -46,6 +46,7 @@ input_file: crates/metadata-resolve/tests/passing/subgraph_valid_name/metadata.j
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -46,6 +46,7 @@ input_file: crates/metadata-resolve/tests/passing/supergraph/config_object_in_su
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -46,6 +46,7 @@ input_file: crates/metadata-resolve/tests/passing/supergraph/missing/metadata.js
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -46,6 +46,7 @@ input_file: crates/metadata-resolve/tests/passing/supergraph/no_subgraphs/metada
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -46,6 +46,7 @@ input_file: crates/metadata-resolve/tests/passing/supergraph/present/metadata.js
),
enable_apollo_federation_fields: false,
},
pre_parse_plugins: [],
roles: [],
},
[],

View File

@ -2007,6 +2007,146 @@
"String"
]
},
"LeafConfig": {
"$id": "https://hasura.io/jsonschemas/metadata/LeafConfig",
"title": "LeafConfig",
"description": "Leaf Configuration.",
"type": "object",
"additionalProperties": false
},
"LifecyclePluginHookConfig": {
"$id": "https://hasura.io/jsonschemas/metadata/LifecyclePluginHookConfig",
"title": "LifecyclePluginHookConfig",
"description": "Configuration for a lifecycle plugin hook.",
"type": "object",
"required": [
"request"
],
"properties": {
"request": {
"description": "Configuration for the request to the lifecycle plugin hook.",
"allOf": [
{
"$ref": "#/definitions/LifecyclePluginHookConfigRequest"
}
]
}
},
"additionalProperties": false
},
"LifecyclePluginHookConfigRequest": {
"$id": "https://hasura.io/jsonschemas/metadata/LifecyclePluginHookConfigRequest",
"title": "LifecyclePluginHookConfigRequest",
"description": "Configuration for a lifecycle plugin hook request.",
"type": "object",
"required": [
"rawRequest"
],
"properties": {
"headers": {
"description": "Configuration for the headers.",
"anyOf": [
{
"$ref": "#/definitions/LifecyclePluginHookHeadersConfig"
},
{
"type": "null"
}
]
},
"session": {
"description": "Configuration for the session (includes roles and session variables).",
"anyOf": [
{
"$ref": "#/definitions/LeafConfig"
},
{
"type": "null"
}
]
},
"rawRequest": {
"description": "Configuration for the raw request.",
"allOf": [
{
"$ref": "#/definitions/RawRequestConfig"
}
]
}
},
"additionalProperties": false
},
"LifecyclePluginHookHeadersConfig": {
"$id": "https://hasura.io/jsonschemas/metadata/LifecyclePluginHookHeadersConfig",
"title": "LifecyclePluginHookHeadersConfig",
"description": "Configuration for a lifecycle plugin hook headers.",
"type": "object",
"properties": {
"additional": {
"description": "Additional headers to be sent with the request.",
"anyOf": [
{
"$ref": "#/definitions/HttpHeaders"
},
{
"type": "null"
}
]
},
"forward": {
"description": "Headers to be forwarded from the incoming request.",
"default": [],
"type": "array",
"items": {
"type": "string"
}
}
},
"additionalProperties": false
},
"LifecyclePluginHookV1": {
"$id": "https://hasura.io/jsonschemas/metadata/LifecyclePluginHookV1",
"title": "LifecyclePluginHookV1",
"description": "Definition of a lifecycle plugin hook - version 1.",
"oneOf": [
{
"title": "LifecyclePluginHookPreParse",
"description": "Definition of a lifecycle plugin hook for the pre-parse stage.",
"type": "object",
"required": [
"config",
"name",
"pre",
"url"
],
"properties": {
"pre": {
"type": "string",
"enum": [
"parse"
]
},
"name": {
"description": "The name of the lifecycle plugin hook.",
"type": "string"
},
"url": {
"description": "The URL to access the lifecycle plugin hook.",
"type": "string"
},
"config": {
"description": "Configuration for the lifecycle plugin hook.",
"allOf": [
{
"$ref": "#/definitions/LifecyclePluginHookConfig"
}
]
}
},
"additionalProperties": false
}
]
},
"LimitInputGraphqlConfig": {
"$id": "https://hasura.io/jsonschemas/metadata/LimitInputGraphqlConfig",
"title": "LimitInputGraphqlConfig",
@ -3956,6 +4096,66 @@
"additionalProperties": false
}
]
},
{
"$id": "https://hasura.io/jsonschemas/metadata/LifecyclePluginHook",
"title": "LifecyclePluginHook",
"description": "Definition of a lifecycle plugin hook.",
"examples": [
{
"kind": "LifecyclePluginHook",
"version": "v1",
"definition": {
"pre": "parse",
"name": "test",
"url": "http://localhost:8080",
"config": {
"request": {
"headers": {
"additional": {
"hasura-m-auth": {
"value": "zZkhKqFjqXR4g5MZCsJUZCnhCcoPyZ"
}
}
},
"session": {},
"rawRequest": {
"query": {},
"variables": {}
}
}
}
}
}
],
"oneOf": [
{
"type": "object",
"required": [
"definition",
"kind",
"version"
],
"properties": {
"kind": {
"type": "string",
"enum": [
"LifecyclePluginHook"
]
},
"version": {
"type": "string",
"enum": [
"v1"
]
},
"definition": {
"$ref": "#/definitions/LifecyclePluginHookV1"
}
},
"additionalProperties": false
}
]
}
]
},
@ -4248,6 +4448,37 @@
},
"additionalProperties": false
},
"RawRequestConfig": {
"$id": "https://hasura.io/jsonschemas/metadata/RawRequestConfig",
"title": "RawRequestConfig",
"description": "Configuration for the raw request.",
"type": "object",
"properties": {
"query": {
"description": "Configuration for the query.",
"anyOf": [
{
"$ref": "#/definitions/LeafConfig"
},
{
"type": "null"
}
]
},
"variables": {
"description": "Configuration for the variables.",
"anyOf": [
{
"$ref": "#/definitions/LeafConfig"
},
{
"type": "null"
}
]
}
},
"additionalProperties": false
},
"ReadWriteUrls": {
"$id": "https://hasura.io/jsonschemas/metadata/ReadWriteUrls",
"title": "ReadWriteUrls",

View File

@ -3,7 +3,7 @@ use std::collections::HashSet;
use crate::identifier::SubgraphName;
use crate::{
aggregates, boolean_expression, commands, data_connector, flags, graphql_config, models,
order_by_expression, permissions, relationships, types, Metadata, MetadataWithVersion,
order_by_expression, permissions, plugins, relationships, types, Metadata, MetadataWithVersion,
OpenDdSubgraphObject, OpenDdSupergraphObject,
};
@ -46,6 +46,7 @@ pub struct MetadataAccessor {
pub flags: flags::Flags,
// `graphql_config` is a vector because we want to do some validation depending on the presence of the object
pub graphql_config: Vec<QualifiedObject<graphql_config::GraphqlConfig>>,
pub plugins: Vec<QualifiedObject<plugins::LifecyclePluginHookV1>>,
}
fn load_metadata_objects(
@ -140,6 +141,11 @@ fn load_metadata_objects(
.command_permissions
.push(QualifiedObject::new(subgraph, permissions.upgrade()));
}
OpenDdSubgraphObject::LifecyclePluginHook(plugin) => {
accessor
.plugins
.push(QualifiedObject::new(subgraph, plugin.upgrade()));
}
}
}
}
@ -215,6 +221,7 @@ impl MetadataAccessor {
command_permissions: vec![],
flags: flags.unwrap_or(DEFAULT_FLAGS),
graphql_config: vec![],
plugins: vec![],
}
}
}

View File

@ -7,7 +7,8 @@ mod v1;
pub use v1::{
DataConnectorArgumentPreset, DataConnectorArgumentPresetValue, DataConnectorLinkV1,
DataConnectorUrlV1 as DataConnectorUrl, HttpHeadersPreset, ReadWriteUrls, ResponseHeaders,
DataConnectorUrlV1 as DataConnectorUrl, HttpHeaders, HttpHeadersPreset, ReadWriteUrls,
ResponseHeaders,
};
use crate::{identifier::Identifier, impl_OpenDd_default_for, str_newtype};

View File

@ -29,7 +29,9 @@ pub enum DataConnectorUrlV1 {
ReadWriteUrls(ReadWriteUrls),
}
#[derive(Serialize, Default, Clone, Debug, PartialEq, opendds_derive::OpenDd)]
#[derive(
Serialize, Deserialize, Default, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd, JsonSchema,
)]
/// Key value map of HTTP headers to be sent with an HTTP request. The key is the
/// header name and the value is a potential reference to an environment variable.
// We wrap maps into newtype structs so that we have a type and title for them in the JSONSchema which

View File

@ -15,6 +15,7 @@ pub mod identifier;
pub mod models;
pub mod order_by_expression;
pub mod permissions;
pub mod plugins;
pub mod query;
pub mod relationships;
pub mod session_variables;
@ -115,6 +116,9 @@ pub enum OpenDdSubgraphObject {
TypePermissions(permissions::TypePermissions),
ModelPermissions(permissions::ModelPermissions),
CommandPermissions(permissions::CommandPermissions),
// Plugin
LifecyclePluginHook(plugins::LifecyclePluginHook),
}
/// All of the metadata required to run Hasura v3 engine.

View File

@ -0,0 +1,197 @@
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use crate::{data_connector::HttpHeaders, impl_OpenDd_default_for};
#[derive(
Serialize, Deserialize, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd, JsonSchema,
)]
#[serde(tag = "version", content = "definition")]
#[serde(rename_all = "camelCase")]
#[opendd(
as_versioned_with_definition,
json_schema(
title = "LifecyclePluginHook",
example = "LifecyclePluginHook::example"
)
)]
#[schemars(title = "LifecyclePluginHook")]
/// Definition of a lifecycle plugin hook.
pub enum LifecyclePluginHook {
V1(LifecyclePluginHookV1),
}
impl LifecyclePluginHook {
fn example() -> serde_json::Value {
serde_json::json!(
{
"kind": "LifecyclePluginHook",
"version": "v1",
"definition": {
"pre": "parse",
"name": "test",
"url": "http://localhost:8080",
"config": {
"request": {
"headers": {
"additional": {
"hasura-m-auth": {
"value": "zZkhKqFjqXR4g5MZCsJUZCnhCcoPyZ"
}
}
},
"session": {},
"rawRequest": {
"query": {},
"variables": {}
}
}
}
}
}
)
}
pub fn upgrade(self) -> LifecyclePluginHookV1 {
match self {
LifecyclePluginHook::V1(v1) => v1,
}
}
}
#[derive(
Serialize, Deserialize, JsonSchema, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd,
)]
#[serde(rename_all = "camelCase")]
#[serde(tag = "pre")]
#[schemars(title = "LifecyclePluginHookV1")]
#[serde(deny_unknown_fields)]
/// Definition of a lifecycle plugin hook - version 1.
pub enum LifecyclePluginHookV1 {
/// Definition of a lifecycle plugin hook for the pre-parse stage.
Parse(LifecyclePluginHookPreParse),
}
type LifecyclePluginUrl = String;
type LifecyclePluginName = String;
#[derive(
Serialize, Deserialize, JsonSchema, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd,
)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "LifecyclePluginHookPreParse")]
/// Definition of a lifecycle plugin hook for the pre-parse stage.
pub struct LifecyclePluginHookPreParse {
/// The name of the lifecycle plugin hook.
pub name: LifecyclePluginName,
/// The URL to access the lifecycle plugin hook.
pub url: LifecyclePluginUrl,
/// Configuration for the lifecycle plugin hook.
pub config: LifecyclePluginHookConfig,
}
#[derive(
Serialize, Deserialize, JsonSchema, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd,
)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "LifecyclePluginHookConfig")]
/// Configuration for a lifecycle plugin hook.
pub struct LifecyclePluginHookConfig {
/// Configuration for the request to the lifecycle plugin hook.
pub request: LifecyclePluginHookConfigRequest,
}
#[derive(
Serialize, Deserialize, JsonSchema, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd,
)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "LifecyclePluginHookConfigRequest")]
/// Configuration for a lifecycle plugin hook request.
pub struct LifecyclePluginHookConfigRequest {
/// Configuration for the headers.
pub headers: Option<LifecyclePluginHookHeadersConfig>,
/// Configuration for the session (includes roles and session variables).
pub session: Option<LeafConfig>,
/// Configuration for the raw request.
pub raw_request: RawRequestConfig,
}
#[derive(
Serialize, Deserialize, JsonSchema, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd,
)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "LifecyclePluginHookHeadersConfig")]
/// Configuration for the lifecycle plugin hook headers.
pub struct LifecyclePluginHookHeadersConfig {
/// Additional headers to be sent with the request.
pub additional: Option<HttpHeaders>,
#[serde(default)]
/// Headers to be forwarded from the incoming request.
pub forward: Vec<String>,
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, JsonSchema, Eq)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "LeafConfig")]
/// Leaf Configuration.
pub struct LeafConfig {}
impl_OpenDd_default_for!(LeafConfig);
#[derive(
Serialize, Deserialize, JsonSchema, Clone, Debug, Eq, PartialEq, opendds_derive::OpenDd,
)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "RawRequestConfig")]
/// Configuration for the raw request.
pub struct RawRequestConfig {
/// Configuration for the query.
pub query: Option<LeafConfig>,
/// Configuration for the variables.
pub variables: Option<LeafConfig>,
}
#[test]
fn test_lifecycle_plugin_hook_parse() {
let hook = LifecyclePluginHook::V1(LifecyclePluginHookV1::Parse(LifecyclePluginHookPreParse {
name: "test".to_string(),
url: "http://localhost:8080".to_string(),
config: LifecyclePluginHookConfig {
request: LifecyclePluginHookConfigRequest {
headers: Some(LifecyclePluginHookHeadersConfig {
additional: Some(HttpHeaders(
vec![(
"hasura-m-auth".to_string(),
crate::EnvironmentValue {
value: "zZkhKqFjqXR4g5MZCsJUZCnhCcoPyZ".to_string(),
},
)]
.into_iter()
.collect(),
)),
forward: Vec::default(),
}),
session: Some(LeafConfig {}),
raw_request: RawRequestConfig {
query: Some(LeafConfig {}),
variables: Some(LeafConfig {}),
},
},
},
}));
let json = serde_json::to_string(&hook).unwrap();
let _hook: LifecyclePluginHook = serde_json::from_str(&json).unwrap();
assert_eq!(hook, _hook);
let json = LifecyclePluginHook::example();
let _hook: LifecyclePluginHook = serde_json::from_value(json).unwrap();
assert_eq!(hook, _hook);
}

View File

@ -9,10 +9,10 @@ license.workspace = true
hasura-authn-core = { path = "../../auth/hasura-authn-core" }
lang-graphql = { path = "../../lang-graphql" }
tracing-util = { path = "../../utils/tracing-util" }
open-dds = { path = "../../open-dds" }
axum = { workspace = true }
reqwest = { workspace = true, features = ["json"] }
schemars = { workspace = true, features = ["smol_str", "url"] }
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }

View File

@ -1,70 +0,0 @@
use reqwest::Url;
use schemars::JsonSchema;
use serde::{de::Error as SerdeDeError, Deserialize, Deserializer, Serialize, Serializer};
#[derive(Serialize, Deserialize, Clone, Debug, JsonSchema, PartialEq)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "RequestConfig")]
pub struct RequestConfig {
pub headers: bool,
pub session: bool,
pub raw_request: RawRequestConfig,
}
#[derive(Serialize, Deserialize, Clone, Debug, JsonSchema, PartialEq)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "RawRequestConfig")]
pub struct RawRequestConfig {
pub query: bool,
pub variables: bool,
}
fn serialize_url<S>(url: &Url, s: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
s.serialize_str(url.as_str())
}
fn deserialize_url<'de, D>(deserializer: D) -> Result<Url, D::Error>
where
D: Deserializer<'de>,
{
let buf = String::deserialize(deserializer)?;
Url::parse(&buf).map_err(SerdeDeError::custom)
}
#[derive(Serialize, Deserialize, Clone, Debug, JsonSchema, PartialEq)]
#[serde(rename_all = "camelCase")]
#[serde(deny_unknown_fields)]
#[schemars(title = "PrePluginConfig")]
#[schemars(example = "PrePluginConfig::example")]
pub struct PrePluginConfig {
pub name: String,
#[serde(serialize_with = "serialize_url", deserialize_with = "deserialize_url")]
pub url: Url,
pub request: RequestConfig,
}
impl PrePluginConfig {
fn example() -> Self {
serde_json::from_str(
r#"{
"name": "example",
"url": "http://example.com",
"request": {
"headers": true,
"session": true,
"rawRequest": {
"query": true,
"variables": true
}
}
}"#,
)
.unwrap()
}
}

View File

@ -1,23 +1,25 @@
use std::{collections::HashMap, fmt::Display};
use std::{collections::HashMap, fmt::Display, str::FromStr};
use axum::{
body::HttpBody,
http::{HeaderMap, Request, StatusCode},
http::{HeaderMap, HeaderName, Request, StatusCode},
response::IntoResponse,
};
use serde::Serialize;
use crate::configuration::PrePluginConfig;
use hasura_authn_core::Session;
use lang_graphql::{ast::common as ast, http::RawRequest};
use open_dds::plugins::LifecyclePluginHookPreParse;
use tracing_util::{
set_attribute_on_active_span, ErrorVisibility, SpanVisibility, Traceable, TraceableError,
};
#[derive(Debug, thiserror::Error)]
pub enum Error {
#[error("Error while making the HTTP request to the pre-execution plugin {0} - {1}")]
#[error("Error while making the HTTP request to the pre-parse plugin {0} - {1}")]
ErrorWhileMakingHTTPRequestToTheHook(String, reqwest::Error),
#[error("Error while building the request for the pre-parse plugin {0} - {1}")]
BuildRequestError(String, String),
#[error("Reqwest error: {0}")]
ReqwestError(reqwest::Error),
#[error("Unexpected status code: {0}")]
@ -44,16 +46,14 @@ impl IntoResponse for Error {
#[derive(Debug, Clone)]
pub enum ErrorResponse {
UserError(Vec<u8>),
InternalError(Option<Vec<u8>>),
UserError(serde_json::Value),
InternalError(Option<serde_json::Value>),
}
impl std::fmt::Display for ErrorResponse {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
let message = match self {
ErrorResponse::UserError(error) | ErrorResponse::InternalError(Some(error)) => {
let error = serde_json::from_slice::<serde_json::Value>(error)
.map_err(|_| std::fmt::Error)?;
error.to_string()
}
ErrorResponse::InternalError(None) => String::new(),
@ -106,14 +106,33 @@ pub struct PreExecutePluginRequestBody {
fn build_request(
http_client: &reqwest::Client,
config: &PrePluginConfig,
config: &LifecyclePluginHookPreParse,
client_headers: &HeaderMap,
session: &Session,
raw_request: &RawRequest,
) -> reqwest::RequestBuilder {
) -> Result<reqwest::RequestBuilder, String> {
let mut pre_plugin_headers = tracing_util::get_trace_headers();
if config.request.headers {
pre_plugin_headers.extend(client_headers.clone());
if let Some(header_config) = config.config.request.headers.as_ref() {
let mut headers = HeaderMap::new();
if let Some(additional_headers) = &header_config.additional {
for (key, value) in &additional_headers.0 {
let header_name =
HeaderName::from_str(key).map_err(|_| format!("Invalid header name {key}"))?;
let header_value = value
.value
.parse()
.map_err(|_| format!("Invalid value for the header {key}"))?;
headers.insert(header_name, header_value);
}
}
for header in &header_config.forward {
if let Some(header_value) = client_headers.get(header) {
let header_name = HeaderName::from_str(header)
.map_err(|_| format!("Invalid header name {header}"))?;
headers.insert(header_name, header_value.clone());
}
}
pre_plugin_headers.extend(headers);
}
let mut request_builder = http_client
.post(config.url.clone())
@ -126,25 +145,25 @@ fn build_request(
operation_name: raw_request.operation_name.clone(),
},
};
if config.request.session {
if config.config.request.session.is_some() {
request_body.session = Some(session.clone());
};
if config.request.raw_request.query {
if config.config.request.raw_request.query.is_some() {
request_body.raw_request.query = Some(raw_request.query.clone());
};
if config.request.raw_request.variables {
if config.config.request.raw_request.variables.is_some() {
request_body
.raw_request
.variables
.clone_from(&raw_request.variables);
};
request_builder = request_builder.json(&request_body);
request_builder
Ok(request_builder)
}
pub async fn execute_plugin(
http_client: &reqwest::Client,
config: &PrePluginConfig,
config: &LifecyclePluginHookPreParse,
client_headers: &HeaderMap,
session: &Session,
raw_request: &RawRequest,
@ -158,7 +177,8 @@ pub async fn execute_plugin(
|| {
Box::pin(async {
let http_request_builder =
build_request(http_client, config, client_headers, session, raw_request);
build_request(http_client, config, client_headers, session, raw_request)
.map_err(|err| Error::BuildRequestError(config.name.clone(), err))?;
let req = http_request_builder.build().map_err(Error::ReqwestError)?;
http_client.execute(req).await.map_err(|e| {
Error::ErrorWhileMakingHTTPRequestToTheHook(config.name.clone(), e)
@ -174,16 +194,17 @@ pub async fn execute_plugin(
Ok(PreExecutePluginResponse::Return(body.to_vec()))
}
StatusCode::INTERNAL_SERVER_ERROR => {
let body = response.bytes().await.map_err(Error::ReqwestError)?;
let body = response.json().await.map_err(Error::ReqwestError)?;
Ok(PreExecutePluginResponse::ReturnError(
ErrorResponse::InternalError(Some(body.to_vec())),
ErrorResponse::InternalError(Some(body)),
))
}
StatusCode::BAD_REQUEST => {
let body = response.bytes().await.map_err(Error::ReqwestError)?;
let response_json: serde_json::Value =
response.json().await.map_err(Error::ReqwestError)?;
Ok(PreExecutePluginResponse::ReturnError(
ErrorResponse::UserError(body.to_vec()),
ErrorResponse::UserError(response_json),
))
}
_ => Err(Error::UnexpectedStatusCode(response.status().as_u16())),
@ -191,7 +212,7 @@ pub async fn execute_plugin(
}
pub async fn pre_execution_plugins_handler<'a, B>(
pre_execution_plugins_config: &Vec<PrePluginConfig>,
pre_execution_plugins_config: &Vec<LifecyclePluginHookPreParse>,
http_client: &reqwest::Client,
session: Session,
request: Request<B>,
@ -238,10 +259,8 @@ where
ErrorResponse::InternalError(error_value),
)) = &plugin_response
{
let error_value = serde_json::from_slice::<serde_json::Value>(
error_value.as_ref().unwrap_or(&vec![]),
)
.map_err(Error::PluginResponseParseError)?;
let error_value =
error_value.as_ref().unwrap_or(&serde_json::Value::Null);
set_attribute_on_active_span(
tracing_util::AttributeVisibility::Default,
"plugin.internal_error",
@ -252,9 +271,6 @@ where
ErrorResponse::UserError(error_value),
)) = &plugin_response
{
let error_value =
serde_json::from_slice::<serde_json::Value>(error_value)
.map_err(Error::PluginResponseParseError)?;
set_attribute_on_active_span(
tracing_util::AttributeVisibility::Default,
"plugin.user_error",
@ -273,8 +289,6 @@ where
}
PreExecutePluginResponse::Continue => (),
PreExecutePluginResponse::ReturnError(ErrorResponse::UserError(error_value)) => {
let error_value = serde_json::from_slice::<serde_json::Value>(&error_value)
.map_err(Error::PluginResponseParseError)?;
let user_error_response =
lang_graphql::http::Response::error_message_with_status_and_details(
reqwest::StatusCode::BAD_REQUEST,

View File

@ -1,2 +1 @@
pub mod configuration;
pub mod execute;