Store NDC capabilities in the resolved metadata. (#641)

## Description

This avoids a request to the NDC `/capabilities` endpoint every time a query or mutation is explained; the capabilities captured at build time are used instead.
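As a condensed sketch of the idea (not code from this PR: the helper `can_explain_query` is illustrative and the types are simplified), the explain-related flags are computed once from the connector's `/capabilities` response while metadata is resolved, stored on the resolved data connector link, and the explain path then only reads the stored booleans:

```rust
/// Flags derived once from the NDC `/capabilities` response while the
/// metadata build resolves each data connector.
#[derive(Clone, Debug)]
pub struct DataConnectorCapabilities {
    pub supports_explaining_queries: bool,
    pub supports_explaining_mutations: bool,
}

/// Simplified stand-in for the resolved data connector link; the real type
/// in the diff below also carries the connector name, URL and headers.
pub struct DataConnectorLink {
    pub capabilities: DataConnectorCapabilities,
}

/// Explain-time check: read the stored flag instead of issuing a
/// `GET /capabilities` request on every explain call.
pub fn can_explain_query(data_connector: &DataConnectorLink) -> bool {
    data_connector.capabilities.supports_explaining_queries
}
```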

## Changelog

- Add a changelog entry (in the "Changelog entry" section below) if the
  changes in this PR have any user-facing impact. See the
  [changelog guide](https://github.com/hasura/graphql-engine-mono/wiki/Changelog-Guide).
- If no changelog is required, ignore/remove this section and add a
  `no-changelog-required` label to the PR.

### Product

_(Select all products this will be available in)_

- [x] community-edition
- [x] cloud
<!-- product : end : DO NOT REMOVE -->

### Type

<!-- See changelog structure:
https://github.com/hasura/graphql-engine-mono/wiki/Changelog-Guide#structure-of-our-changelog
-->

_(Select only one. If multiple apply, choose the most appropriate)_

- [ ] highlight
- [ ] enhancement
- [ ] bugfix
- [x] behaviour-change
- [ ] performance-enhancement
- [ ] security-fix
<!-- type : end : DO NOT REMOVE -->

### Changelog entry

<!--
  - Add a user-understandable changelog entry
  - Include all details needed to understand the change. Try including
    links to docs or issues if relevant
  - For Highlights, start with an H4 heading (#### <entry title>)
  - Get the changelog entry reviewed by your team
-->

The engine no longer makes a request to the NDC `/capabilities` endpoint when
explaining a query or mutation; instead, it uses the capabilities stored in
the resolved metadata at build time.

<!-- changelog-entry : end : DO NOT REMOVE -->

<!-- changelog : end : DO NOT REMOVE -->

V3_GIT_ORIGIN_REV_ID: f38867e0f5a832c45a857c5e58d7a44c1c05d71f
Samir Talwar 2024-05-30 20:42:08 +03:00 committed by hasura-bot
parent c09a6ee6c6
commit 11e1e02d59
15 changed files with 143 additions and 97 deletions

View File

@@ -289,8 +289,7 @@
"capabilities": {
"query": {
"aggregates": {},
"variables": {},
"explain": {}
"variables": {}
},
"relationships": {
"relation_comparisons": {},
@@ -955,7 +954,9 @@
"relation_comparisons": {},
"order_by_aggregate": {}
},
"mutation": {}
"mutation": {
"explain": {}
}
},
"version": "0.1.0"
}

View File

@@ -289,17 +289,14 @@ async fn fetch_explain_from_data_connector(
Box::pin(async {
let ndc_config = ndc_client::Configuration {
base_path: data_connector.url.get_url(ast::OperationType::Query),
user_agent: None,
// This isn't expensive, reqwest::Client is behind an Arc
client: http_context.client.clone(),
headers: &data_connector.headers.0,
response_size_limit: http_context.ndc_response_size_limit,
};
// TODO: use capabilities from the data connector context
let capabilities = ndc_client::capabilities_get(&ndc_config).await?;
match ndc_request {
types::NDCRequest::Query(query_request) => {
if capabilities.capabilities.query.explain.is_some() {
if data_connector.capabilities.supports_explaining_queries {
ndc_client::explain_query_post(&ndc_config, query_request)
.await
.map(Some)
@@ -309,7 +306,7 @@ async fn fetch_explain_from_data_connector(
}
}
types::NDCRequest::Mutation(mutation_request) => {
if capabilities.capabilities.mutation.explain.is_some() {
if data_connector.capabilities.supports_explaining_mutations {
ndc_client::explain_mutation_post(&ndc_config, mutation_request)
.await
.map(Some)

View File

@@ -400,36 +400,31 @@ mod tests {
use schema::GDS;
#[test]
fn test_generate_ir() {
fn test_generate_ir() -> Result<(), Box<dyn std::error::Error>> {
let test_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("tests");
let mut mint = Mint::new(&test_dir);
let schema = fs::read_to_string(test_dir.join("schema.json")).unwrap();
let schema = fs::read_to_string(test_dir.join("schema.json"))?;
let gds = GDS::new(open_dds::Metadata::from_json_str(&schema).unwrap()).unwrap();
let schema = GDS::build_schema(&gds).unwrap();
let gds = GDS::new(open_dds::Metadata::from_json_str(&schema)?)?;
let schema = GDS::build_schema(&gds)?;
for input_file in fs::read_dir(test_dir.join("generate_ir")).unwrap() {
let entry = input_file.unwrap();
let raw_request = {
let path = entry.path();
assert!(path.is_dir());
fs::read_to_string(path.join("request.gql")).unwrap()
};
let expected_path = {
let path = entry.path();
let test_name = path.file_name().unwrap().to_str().unwrap();
PathBuf::from_iter(["generate_ir", test_name, "expected.json"])
};
for input_file in fs::read_dir(test_dir.join("generate_ir"))? {
let path = input_file?.path();
assert!(path.is_dir());
let session = {
let path = entry.path();
let session_vars_path = path.join("session_variables.json");
resolve_session(session_vars_path)
};
let query = Parser::new(&raw_request)
.parse_executable_document()
.unwrap();
let test_name = path
.file_name()
.ok_or_else(|| format!("{path:?} is not a normal file or directory"))?;
let raw_request = fs::read_to_string(path.join("request.gql"))?;
let expected_path = PathBuf::from("generate_ir")
.join(test_name)
.join("expected.json");
let session_vars_path = path.join("session_variables.json");
let session = resolve_session(session_vars_path);
let query = Parser::new(&raw_request).parse_executable_document()?;
let request = Request {
operation_name: None,
@@ -437,25 +432,25 @@ mod tests {
variables: HashMap::new(),
};
let normalized_request = normalize_request(&session.role, &schema, &request).unwrap();
let normalized_request = normalize_request(&session.role, &schema, &request)?;
let ir = generate_ir(&schema, &session, &normalized_request).unwrap();
let mut expected = mint
.new_goldenfile_with_differ(
expected_path,
Box::new(|file1, file2| {
let json1: serde_json::Value =
serde_json::from_reader(File::open(file1).unwrap()).unwrap();
let json2: serde_json::Value =
serde_json::from_reader(File::open(file2).unwrap()).unwrap();
if json1 != json2 {
text_diff(file1, file2)
}
}),
)
.unwrap();
write!(expected, "{}", serde_json::to_string_pretty(&ir).unwrap()).unwrap();
let ir = generate_ir(&schema, &session, &normalized_request)?;
let mut expected = mint.new_goldenfile_with_differ(
expected_path,
Box::new(|file1, file2| {
let json1: serde_json::Value =
serde_json::from_reader(File::open(file1).unwrap()).unwrap();
let json2: serde_json::Value =
serde_json::from_reader(File::open(file2).unwrap()).unwrap();
if json1 != json2 {
text_diff(file1, file2)
}
}),
)?;
write!(expected, "{}", serde_json::to_string_pretty(&ir)?)?;
}
Ok(())
}
// TODO: remove duplication between this function and 'add_session'

View File

@@ -80,7 +80,6 @@ pub(crate) async fn fetch_from_data_connector<'s>(
append_project_id_to_headers(&data_connector.headers.0, project_id)?;
let ndc_config = client::Configuration {
base_path: data_connector.url.get_url(ast::OperationType::Query),
user_agent: None,
// This isn't expensive, reqwest::Client is behind an Arc
client: http_context.client.clone(),
headers: &headers,
@@ -208,7 +207,6 @@ pub(crate) async fn fetch_from_data_connector_mutation<'s>(
append_project_id_to_headers(&data_connector.headers.0, project_id)?;
let ndc_config = client::Configuration {
base_path: data_connector.url.get_url(ast::OperationType::Mutation),
user_agent: None,
// This isn't expensive, reqwest::Client is behind an Arc
client: http_context.client.clone(),
headers: &headers,

View File

@@ -1,5 +1,3 @@
use std::convert::identity;
use reqwest::header::{HeaderMap, HeaderValue};
use serde::{de::DeserializeOwned, Deserialize};
use thiserror::Error;
@@ -64,36 +62,11 @@ pub struct InvalidConnectorError {
#[derive(Debug, Clone)]
pub struct Configuration<'s> {
pub base_path: &'s reqwest::Url,
pub user_agent: Option<String>,
pub client: reqwest::Client,
pub headers: &'s HeaderMap<HeaderValue>,
pub response_size_limit: Option<usize>,
}
/// GET on /capabilities endpoint
///
/// <https://hasura.github.io/ndc-spec/specification/capabilities.html>
pub async fn capabilities_get(
configuration: &Configuration<'_>,
) -> Result<ndc_models::CapabilitiesResponse, Error> {
let tracer = tracing_util::global_tracer();
tracer
.in_span_async(
"capabilities_get",
"Get capabilities",
SpanVisibility::Internal,
|| {
Box::pin(async {
let url = append_path(configuration.base_path, &["capabilities"])?;
let request =
construct_request(configuration, reqwest::Method::GET, url, identity);
execute_request(configuration, request).await
})
},
)
.await
}
/// POST on /query/explain endpoint
///
/// <https://hasura.github.io/ndc-spec/specification/explain.html?highlight=%2Fexplain#request>
@@ -230,11 +203,6 @@ fn construct_request(
let mut request_builder = configuration.client.request(method, url);
// Apply customizations
request_builder = modify(request_builder);
// Set user agent if provided
if let Some(ref user_agent) = configuration.user_agent {
request_builder =
request_builder.header(reqwest::header::USER_AGENT, user_agent);
}
// Set headers from configuration
request_builder = request_builder.headers(configuration.headers.clone());
// Return the prepared request

View File

@@ -104,6 +104,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",

View File

@@ -95,6 +95,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",

View File

@@ -36,6 +36,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",
@@ -147,6 +151,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "author",
@@ -258,6 +266,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",
@@ -467,6 +479,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"arguments": {},
@@ -486,6 +502,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",
@@ -512,6 +532,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "author",
@@ -538,6 +562,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",
@@ -584,6 +612,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"source_type_mappings": {
@@ -628,6 +660,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",
@@ -711,6 +747,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"source_type_mappings": {
@@ -755,6 +795,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "author",
@@ -838,6 +882,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"source_type_mappings": {
@@ -882,6 +930,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",

View File

@@ -104,6 +104,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",

View File

@@ -95,6 +95,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "article",
@@ -280,6 +284,10 @@
},
"headers": {
"hasura-m-auth-token": "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_.~!#$&'()*+,/:;=?@[]\""
},
"capabilities": {
"supports_explaining_queries": true,
"supports_explaining_mutations": false
}
},
"collection": "author",

View File

@@ -476,8 +476,7 @@ pub(crate) fn resolve_model_predicate_with_type(
let data_connector_link = data_connectors::DataConnectorLink::new(
data_connector_name.clone(),
data_connector_context.inner.url.clone(),
data_connector_context.inner.headers,
&data_connector_context.inner,
)?;
// look up this type in the context of its data connector

View File

@@ -349,8 +349,7 @@ pub fn resolve_command_source(
let command_source = CommandSource {
data_connector: data_connectors::DataConnectorLink::new(
qualified_data_connector_name,
data_connector_context.inner.url.clone(),
data_connector_context.inner.headers,
&data_connector_context.inner,
)?,
source: command_source.data_connector_command.clone(),
type_mappings,

View File

@@ -10,7 +10,6 @@ use open_dds::{
self, DataConnectorName, DataConnectorScalarType, DataConnectorUrl, ReadWriteUrls,
VersionedSchemaAndCapabilities,
},
EnvironmentValue,
};
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
@@ -147,6 +146,7 @@ pub struct DataConnectorLink {
pub name: Qualified<DataConnectorName>,
pub url: ResolvedDataConnectorUrl,
pub headers: SerializableHeaderMap,
pub capabilities: DataConnectorCapabilities,
}
impl std::hash::Hash for DataConnectorLink {
@@ -161,10 +161,9 @@ impl std::hash::Hash for DataConnectorLink {
impl DataConnectorLink {
pub(crate) fn new(
name: Qualified<DataConnectorName>,
url: DataConnectorUrl,
headers: &IndexMap<String, EnvironmentValue>,
info: &DataConnectorCoreInfo<'_>,
) -> Result<Self, Error> {
let url = match url {
let url = match info.url {
DataConnectorUrl::SingleUrl(url) => ResolvedDataConnectorUrl::SingleUrl(
SerializableUrl::new(&url.value).map_err(|e| Error::InvalidDataConnectorUrl {
data_connector_name: name.clone(),
@@ -188,7 +187,7 @@ impl DataConnectorLink {
})
}
};
let headers = SerializableHeaderMap::new(headers).map_err(|e| match e {
let headers = SerializableHeaderMap::new(info.headers).map_err(|e| match e {
HeaderError::InvalidHeaderName { header_name } => Error::InvalidHeaderName {
data_connector: name.clone(),
header_name,
@@ -198,7 +197,21 @@ impl DataConnectorLink {
header_name,
},
})?;
Ok(Self { name, url, headers })
let capabilities = DataConnectorCapabilities {
supports_explaining_queries: info.capabilities.capabilities.query.explain.is_some(),
supports_explaining_mutations: info
.capabilities
.capabilities
.mutation
.explain
.is_some(),
};
Ok(Self {
name,
url,
headers,
capabilities,
})
}
}
@@ -231,6 +244,12 @@ impl ResolvedDataConnectorUrl {
}
}
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq)]
pub struct DataConnectorCapabilities {
pub supports_explaining_queries: bool,
pub supports_explaining_mutations: bool,
}
#[cfg(test)]
mod tests {
use ndc_models;

View File

@@ -700,8 +700,7 @@ fn resolve_model_source(
let resolved_model_source = ModelSource {
data_connector: data_connectors::DataConnectorLink::new(
qualified_data_connector_name,
data_connector_context.inner.url.clone(),
data_connector_context.inner.headers,
&data_connector_context.inner,
)?,
collection: model_source.collection.clone(),
type_mappings,

View File

@@ -268,8 +268,7 @@ pub(crate) fn resolve_object_boolean_expression_type(
let data_connector_link = data_connectors::DataConnectorLink::new(
data_connector_name,
data_connector_context.inner.url.clone(),
data_connector_context.inner.headers,
&data_connector_context.inner,
)?;
let resolved_boolean_expression = ObjectBooleanExpressionType {