Use references where possible instead of cloning. (#478)
When trying to reduce the number of dependencies we use in the engine, I was blocked by a few `.clone()` calls that, on inspection, turned out to be completely unnecessary. I have replaced those with passing by reference, and then gone on a pedant spree.

I enabled the `needless_pass_by_value` Clippy warning and fixed it everywhere that it highlighted. In most places, this meant adding `&`, but I also marked some types as `Copy`, which makes pass-by-value the right move.

In one place, I replaced calls to `async_map` with `if` and `else`, to avoid constructing closures that capture across async boundaries. This means I could just delete `async_map`.

V3_GIT_ORIGIN_REV_ID: 6ff71f0c553b707889d89552eff3e8c001e898cc
This commit is contained in:
parent 7b510d1373 · commit c7d9baaf66
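Most hunks below share one shape: a parameter that was taken by value, forcing callers to `.clone()`, now borrows instead, and any owned value is created only at the point that actually stores it. A minimal sketch of that pattern (hypothetical names, not code from this repository):

```rust
use std::collections::BTreeMap;

// Before: taking `String` by value forces every caller to clone or give up
// ownership, even though the function only needs to read the name.
fn count_before(name: String, counts: &mut BTreeMap<String, u32>) {
    *counts.entry(name).or_insert(0) += 1;
}

// After: borrow the name; allocate only on the path that actually stores it.
fn count_after(name: &str, counts: &mut BTreeMap<String, u32>) {
    match counts.get_mut(name) {
        Some(n) => *n += 1,
        None => {
            counts.insert(name.to_string(), 1);
        }
    }
}

fn main() {
    let mut counts = BTreeMap::new();
    let model = "model1".to_string();
    count_after(&model, &mut counts); // caller keeps `model`; no clone up front
    count_before(model, &mut counts); // by-value: `model` is moved here
    assert_eq!(counts["model1"], 2);
}
```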
@@ -23,7 +23,6 @@ must_use_candidate = "allow"
 wildcard_imports = "allow"
 # disable these for now, but we should probably fix them
 cast_precision_loss = "allow"
-cloned_instead_of_copied = "allow"
 default_trait_access = "allow"
 explicit_into_iter_loop = "allow"
 explicit_iter_loop = "allow"

@@ -41,7 +40,6 @@ manual_string_new = "allow"
 map_unwrap_or = "allow"
 match_same_arms = "allow"
 match_wildcard_for_single_variants = "allow"
-needless_pass_by_value = "allow"
 redundant_else = "allow"
 result_large_err = "allow"
 return_self_not_must_use = "allow"

@@ -51,7 +49,6 @@ single_match_else = "allow"
 struct_field_names = "allow"
 too_many_arguments = "allow"
 too_many_lines = "allow"
-trivially_copy_pass_by_ref = "allow"
 uninlined_format_args = "allow"
 unnecessary_box_returns = "allow"
 unnecessary_wraps = "allow"
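Deleting an `allow` entry re-enables that Clippy lint at its default level across the build, which is what drives the rest of this diff. A sketch of what each re-enabled lint flags (illustrative only, not engine code):

```rust
// `needless_pass_by_value`: parameters taken by value but only read;
// the fix is borrowing, as in most hunks below.
fn total_len(lines: &[String]) -> usize {
    lines.iter().map(String::len).sum()
}

// `trivially_copy_pass_by_ref`: references to tiny `Copy` types such as `&u8`;
// plain pass-by-value is simpler and no slower.
fn is_space(b: u8) -> bool {
    b == b' '
}

// `cloned_instead_of_copied`: for `Copy` items, `.copied()` says what happens;
// `.cloned()` suggests an expensive operation that isn't there.
fn doubled(xs: &[i32]) -> Vec<i32> {
    xs.iter().copied().map(|x| x * 2).collect()
}

fn main() {
    assert_eq!(total_len(&["ab".to_string()]), 2);
    assert!(is_space(b' '));
    assert_eq!(doubled(&[1, 2]), vec![2, 4]);
}
```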
@@ -73,14 +73,14 @@ pub async fn post_query(
     State(state): State<Arc<AppState>>,
     Json(request): Json<ndc_models::QueryRequest>,
 ) -> Result<Json<ndc_models::QueryResponse>> {
-    custom_connector::query::execute_query_request(state.borrow(), request).map(Json)
+    custom_connector::query::execute_query_request(state.borrow(), &request).map(Json)
 }
 
 async fn post_mutation(
     State(state): State<Arc<AppState>>,
     Json(request): Json<ndc_models::MutationRequest>,
 ) -> Result<Json<ndc_models::MutationResponse>> {
-    custom_connector::mutation::execute_mutation_request(state.borrow(), request).map(Json)
+    custom_connector::mutation::execute_mutation_request(state.borrow(), &request).map(Json)
 }
 
 async fn post_explain(

@@ -9,7 +9,7 @@ type Result<A> = std::result::Result<A, (StatusCode, Json<ndc_models::ErrorRespo
 
 pub fn execute_mutation_request(
     state: &AppState,
-    request: ndc_models::MutationRequest,
+    request: &ndc_models::MutationRequest,
 ) -> Result<ndc_models::MutationResponse> {
     let mut operation_results = vec![];
 

@@ -16,14 +16,18 @@ use crate::{
 
 pub type Result<A> = std::result::Result<A, (StatusCode, Json<ndc_models::ErrorResponse>)>;
 
+const DEFAULT_VARIABLE_SETS: &[BTreeMap<String, Value>] = &[BTreeMap::new()];
+
 pub fn execute_query_request(
     state: &AppState,
-    request: ndc_models::QueryRequest,
+    request: &ndc_models::QueryRequest,
 ) -> Result<ndc_models::QueryResponse> {
-    let variable_sets = request.variables.unwrap_or(vec![BTreeMap::new()]);
+    let variable_sets: &[BTreeMap<String, Value>] = request
+        .variables
+        .as_deref()
+        .unwrap_or(DEFAULT_VARIABLE_SETS);
     let mut row_sets = vec![];
-    for variables in variable_sets.iter() {
+    for variables in variable_sets {
         let row_set = execute_query_with_variables(
             &request.collection,
             &request.arguments,

@@ -106,6 +110,7 @@ pub(crate) fn parse_object_argument<'a>(
     Ok(name_object)
 }
 
+#[derive(Clone, Copy)]
 enum Root<'a> {
     /// References to the root collection actually
     /// refer to the current row, because the path to

@@ -272,14 +277,14 @@ fn eval_aggregate(
                 ))
             })
             .collect::<Result<Vec<_>>>()?;
-            eval_aggregate_function(function, values)
+            eval_aggregate_function(function, &values)
         }
     }
 }
 
 fn eval_aggregate_function(
     function: &str,
-    values: Vec<&serde_json::Value>,
+    values: &[&serde_json::Value],
 ) -> Result<serde_json::Value> {
     let int_values = values
         .iter()

@@ -416,7 +421,7 @@ fn eval_order_by_element(
     element: &ndc_models::OrderByElement,
     item: &Row,
 ) -> Result<serde_json::Value> {
-    match element.target.clone() {
+    match &element.target {
         ndc_models::OrderByTarget::Column { name, path } => {
             eval_order_by_column(collection_relationships, variables, state, item, path, name)
         }

@@ -450,9 +455,9 @@ fn eval_order_by_star_count_aggregate(
     variables: &BTreeMap<String, serde_json::Value>,
     state: &AppState,
     item: &BTreeMap<String, serde_json::Value>,
-    path: Vec<ndc_models::PathElement>,
+    path: &[ndc_models::PathElement],
 ) -> Result<serde_json::Value> {
-    let rows: Vec<Row> = eval_path(collection_relationships, variables, state, &path, item)?;
+    let rows: Vec<Row> = eval_path(collection_relationships, variables, state, path, item)?;
     Ok(rows.len().into())
 }
 

@@ -461,15 +466,15 @@ fn eval_order_by_single_column_aggregate(
     variables: &BTreeMap<String, serde_json::Value>,
     state: &AppState,
     item: &BTreeMap<String, serde_json::Value>,
-    path: Vec<ndc_models::PathElement>,
-    column: String,
-    function: String,
+    path: &[ndc_models::PathElement],
+    column: &str,
+    function: &str,
 ) -> Result<serde_json::Value> {
-    let rows: Vec<Row> = eval_path(collection_relationships, variables, state, &path, item)?;
+    let rows: Vec<Row> = eval_path(collection_relationships, variables, state, path, item)?;
     let values = rows
         .iter()
         .map(|row| {
-            row.get(column.as_str()).ok_or((
+            row.get(column).ok_or((
                 StatusCode::BAD_REQUEST,
                 Json(ndc_models::ErrorResponse {
                     message: "invalid column name".into(),

@@ -478,7 +483,7 @@ fn eval_order_by_single_column_aggregate(
             ))
         })
         .collect::<Result<Vec<_>>>()?;
-    eval_aggregate_function(&function, values)
+    eval_aggregate_function(function, &values)
 }
 
 fn eval_order_by_column(

@@ -486,10 +491,10 @@ fn eval_order_by_column(
     variables: &BTreeMap<String, serde_json::Value>,
     state: &AppState,
     item: &BTreeMap<String, serde_json::Value>,
-    path: Vec<ndc_models::PathElement>,
-    name: String,
+    path: &[ndc_models::PathElement],
+    name: &str,
 ) -> Result<serde_json::Value> {
-    let rows: Vec<Row> = eval_path(collection_relationships, variables, state, &path, item)?;
+    let rows: Vec<Row> = eval_path(collection_relationships, variables, state, path, item)?;
     if rows.len() > 1 {
         return Err((
             StatusCode::BAD_REQUEST,

@@ -500,7 +505,7 @@ fn eval_order_by_column(
         ));
     }
     match rows.first() {
-        Some(row) => eval_column(row, name.as_str()),
+        Some(row) => eval_column(row, name),
         None => Ok(serde_json::Value::Null),
     }
 }

@@ -6,7 +6,6 @@ use crate::execute::remote_joins::types::{JoinId, JoinLocations, RemoteJoin};
 use crate::execute::{error, plan};
 use crate::metadata::resolved;
 use crate::schema::GDS;
-use crate::utils::async_map::AsyncMap;
 use async_recursion::async_recursion;
 use hasura_authn_core::Session;
 use lang_graphql as gql;

@@ -192,12 +191,8 @@ async fn get_execution_steps<'s>(
     let mut sequence_steps = match process_response_as {
         ProcessResponseAs::CommandResponse { .. } => {
             // A command execution node
-            let data_connector_explain = fetch_explain_from_data_connector(
-                http_context,
-                ndc_request.clone(),
-                data_connector,
-            )
-            .await;
+            let data_connector_explain =
+                fetch_explain_from_data_connector(http_context, &ndc_request, data_connector).await;
             NonEmpty::new(Box::new(types::Step::CommandSelect(
                 types::CommandSelectIR {
                     command_name: alias.to_string(),

@@ -208,12 +203,8 @@ async fn get_execution_steps<'s>(
         }
         ProcessResponseAs::Array { .. } | ProcessResponseAs::Object { .. } => {
             // A model execution node
-            let data_connector_explain = fetch_explain_from_data_connector(
-                http_context,
-                ndc_request.clone(),
-                data_connector,
-            )
-            .await;
+            let data_connector_explain =
+                fetch_explain_from_data_connector(http_context, &ndc_request, data_connector).await;
             NonEmpty::new(Box::new(types::Step::ModelSelect(types::ModelSelectIR {
                 model_name: alias.to_string(),
                 ndc_request,

@@ -221,8 +212,7 @@ async fn get_execution_steps<'s>(
             })))
         }
     };
-    if let Some(join_steps) = get_join_steps(alias.to_string(), join_locations, http_context).await
-    {
+    if let Some(join_steps) = get_join_steps(join_locations, http_context).await {
         sequence_steps.push(Box::new(types::Step::Sequence(join_steps)));
         sequence_steps.push(Box::new(types::Step::HashJoin));
     };

@@ -235,7 +225,6 @@ async fn get_execution_steps<'s>(
 /// TODO: Currently the steps are sequential, we should make them parallel once the executor supports it.
 #[async_recursion]
 async fn get_join_steps(
-    _root_field_name: String,
     join_locations: JoinLocations<(RemoteJoin<'async_recursion, 'async_recursion>, JoinId)>,
     http_context: &HttpContext,
 ) -> Option<NonEmpty<Box<types::Step>>> {

@@ -248,7 +237,7 @@ async fn get_join_steps(
             let ndc_request = types::NDCRequest::Query(query_request);
             let data_connector_explain = fetch_explain_from_data_connector(
                 http_context,
-                ndc_request.clone(),
+                &ndc_request,
                 remote_join.target_data_connector,
             )
             .await;

@@ -272,7 +261,7 @@ async fn get_join_steps(
             },
         )))
     };
-    if let Some(rest_join_steps) = get_join_steps(alias, location.rest, http_context).await {
+    if let Some(rest_join_steps) = get_join_steps(location.rest, http_context).await {
         sequence_steps.push(Box::new(types::Step::Sequence(rest_join_steps)));
         sequence_steps.push(Box::new(types::Step::HashJoin));
     };

@@ -314,7 +303,7 @@ fn simplify_step(step: Box<types::Step>) -> Box<types::Step> {
 
 async fn fetch_explain_from_data_connector(
     http_context: &HttpContext,
-    ndc_request: types::NDCRequest,
+    ndc_request: &types::NDCRequest,
     data_connector: &resolved::data_connector::DataConnectorLink,
 ) -> types::NDCExplainResponse {
     let tracer = tracing_util::global_tracer();

@@ -333,39 +322,28 @@ async fn fetch_explain_from_data_connector(
                 headers: data_connector.headers.0.clone(),
                 response_size_limit: http_context.ndc_response_size_limit,
             };
-            {
-                // TODO: use capabilities from the data connector context
-                let capabilities = ndc_client::capabilities_get(&ndc_config).await?;
-                match ndc_request {
-                    types::NDCRequest::Query(query_request) => capabilities
-                        .capabilities
-                        .query
-                        .explain
-                        .async_map(|_| {
-                            Box::pin(async move {
-                                ndc_client::explain_query_post(&ndc_config, query_request)
-                                    .await
-                                    .map_err(error::Error::from)
-                            })
-                        })
-                        .await
-                        .transpose(),
-                    types::NDCRequest::Mutation(mutation_request) => capabilities
-                        .capabilities
-                        .mutation
-                        .explain
-                        .async_map(|_| {
-                            Box::pin(async move {
-                                ndc_client::explain_mutation_post(
-                                    &ndc_config,
-                                    mutation_request,
-                                )
-                                .await
-                                .map_err(error::Error::from)
-                            })
-                        })
-                        .await
-                        .transpose(),
-                }
-            }
+            // TODO: use capabilities from the data connector context
+            let capabilities = ndc_client::capabilities_get(&ndc_config).await?;
+            match ndc_request {
+                types::NDCRequest::Query(query_request) => {
+                    if capabilities.capabilities.query.explain.is_some() {
+                        ndc_client::explain_query_post(&ndc_config, query_request)
+                            .await
+                            .map(Some)
+                            .map_err(error::Error::from)
+                    } else {
+                        Ok(None)
+                    }
+                }
+                types::NDCRequest::Mutation(mutation_request) => {
+                    if capabilities.capabilities.mutation.explain.is_some() {
+                        ndc_client::explain_mutation_post(&ndc_config, mutation_request)
+                            .await
+                            .map(Some)
+                            .map_err(error::Error::from)
+                    } else {
+                        Ok(None)
+                    }
+                }
+            }
         })
@@ -112,7 +112,7 @@ pub(crate) enum NDCExplainResponse {
     Error(GraphQLError),
 }
 
-#[derive(Serialize, Debug, PartialEq, Clone)]
+#[derive(Serialize, Debug, PartialEq)]
 #[serde(rename_all = "camelCase")]
 #[serde(tag = "type", content = "value")]
 pub(crate) enum NDCRequest {

@@ -83,7 +83,7 @@ pub(crate) fn generate_command_info<'n, 's>(
     field: &'n normalized_ast::Field<'s, GDS>,
     field_call: &'n normalized_ast::FieldCall<'s, GDS>,
     result_type: &QualifiedTypeReference,
-    result_base_type_kind: &TypeKind,
+    result_base_type_kind: TypeKind,
     command_source: &'s CommandSourceDetail,
     session_variables: &SessionVariables,
 ) -> Result<CommandInfo<'s>, error::Error> {

@@ -118,7 +118,7 @@ pub(crate) fn generate_command_info<'n, 's>(
 
     // Add the name of the root command
     let mut usage_counts = UsagesCounts::new();
-    count_command(command_name.clone(), &mut usage_counts);
+    count_command(command_name, &mut usage_counts);
 
     let selection = selection_set::generate_nested_selection(
         result_type,

@@ -148,7 +148,7 @@ pub(crate) fn generate_function_based_command<'n, 's>(
     field: &'n normalized_ast::Field<'s, GDS>,
     field_call: &'n normalized_ast::FieldCall<'s, GDS>,
     result_type: &QualifiedTypeReference,
-    result_base_type_kind: &TypeKind,
+    result_base_type_kind: TypeKind,
     command_source: &'s CommandSourceDetail,
     session_variables: &SessionVariables,
 ) -> Result<FunctionBasedCommand<'s>, error::Error> {

@@ -176,7 +176,7 @@ pub(crate) fn generate_procedure_based_command<'n, 's>(
     field: &'n normalized_ast::Field<'s, GDS>,
     field_call: &'n normalized_ast::FieldCall<'s, GDS>,
     result_type: &QualifiedTypeReference,
-    result_base_type_kind: &TypeKind,
+    result_base_type_kind: TypeKind,
     command_source: &'s CommandSourceDetail,
     session_variables: &SessionVariables,
 ) -> Result<ProcedureBasedCommand<'s>, error::Error> {

@@ -175,7 +175,7 @@ pub(crate) fn build_filter_expression<'s>(
         },
     )) => {
         // Add the target model being used in the usage counts
-        count_model(target_model_name.clone(), usage_counts);
+        count_model(target_model_name, usage_counts);
 
         let ndc_relationship_name = NDCRelationshipName::new(source_type, relationship_name)?;
         relationships.insert(

@@ -67,7 +67,7 @@ pub fn generate_ir<'n, 's>(
             field,
             field_call,
             result_type,
-            result_base_type_kind,
+            *result_base_type_kind,
             source,
             session_variables,
         )?,

@@ -213,7 +213,7 @@ pub(crate) fn build_ndc_order_by_element<'s>(
             );
 
             // Add the target model being used in the usage counts
-            count_model(target_model_name.clone(), usage_counts);
+            count_model(target_model_name, usage_counts);
 
             // This map contains the relationships or the columns of the relationship that needs to be used for ordering.
             let argument_value_map = argument.value.as_object()?;

@@ -88,7 +88,7 @@ pub(crate) fn process_model_predicate<'s>(
             operator,
         } => Ok(make_permission_unary_boolean_expression(
             ndc_column.clone(),
-            operator,
+            *operator,
             &relationship_paths,
         )?),
         resolved::model::ModelPredicate::BinaryFieldComparison {

@@ -172,7 +172,7 @@ pub(crate) fn process_model_predicate<'s>(
             );
 
             // Add the target model being used in the usage counts
-            count_model(relationship_info.target_model_name.clone(), usage_counts);
+            count_model(&relationship_info.target_model_name, usage_counts);
 
             process_model_predicate(
                 predicate,

@@ -210,7 +210,7 @@ fn make_permission_binary_boolean_expression(
 
 fn make_permission_unary_boolean_expression(
     ndc_column: String,
-    operator: &ndc_models::UnaryComparisonOperator,
+    operator: ndc_models::UnaryComparisonOperator,
     relationship_paths: &Vec<NDCRelationshipName>,
 ) -> Result<ndc_models::Expression, Error> {
     let path_elements = super::filter::build_path_elements(relationship_paths);

@@ -219,7 +219,7 @@ fn make_permission_unary_boolean_expression(
             name: ndc_column,
             path: path_elements,
         },
-        operator: *operator,
+        operator,
     })
 }
 

@@ -245,7 +245,7 @@ fn generate_command_rootfield_ir<'n, 's>(
             field,
             field_call,
             result_type,
-            result_base_type_kind,
+            *result_base_type_kind,
             source,
             session_variables,
         )?,

@@ -60,7 +60,7 @@ pub(crate) fn select_many_generate_ir<'n, 's>(
 
     // Add the name of the root model
     let mut usage_counts = UsagesCounts::new();
-    count_model(model_name.clone(), &mut usage_counts);
+    count_model(model_name, &mut usage_counts);
 
     for argument in field_call.arguments.values() {
         match argument.info.generic {

@@ -112,7 +112,7 @@ pub(crate) fn select_one_generate_ir<'n, 's>(
 
     // Add the name of the root model
     let mut usage_counts = UsagesCounts::new();
-    count_model(model_name.clone(), &mut usage_counts);
+    count_model(model_name, &mut usage_counts);
 
     let filter_clause = ResolvedFilterExpression {
         expression: Some(ndc_models::Expression::And {

@@ -221,7 +221,7 @@ pub(crate) fn generate_model_relationship_ir<'s>(
     usage_counts: &mut UsagesCounts,
 ) -> Result<FieldSelection<'s>, error::Error> {
     // Add the target model being used in the usage counts
-    count_model(annotation.model_name.clone(), usage_counts);
+    count_model(&annotation.model_name, usage_counts);
     let field_call = field.field_call()?;
 
     let mut limit = None;

@@ -335,7 +335,7 @@ pub(crate) fn generate_command_relationship_ir<'s>(
     session_variables: &SessionVariables,
     usage_counts: &mut UsagesCounts,
 ) -> Result<FieldSelection<'s>, error::Error> {
-    count_command(annotation.command_name.clone(), usage_counts);
+    count_command(&annotation.command_name, usage_counts);
     let field_call = field.field_call()?;
 
     let target_source =

@@ -439,7 +439,7 @@ pub(crate) fn build_local_command_relationship<'s>(
         field,
         field_call,
         &annotation.target_type,
-        &annotation.target_base_type_kind,
+        annotation.target_base_type_kind,
         &target_source.details,
         session_variables,
     )?;

@@ -581,7 +581,7 @@ pub(crate) fn build_remote_command_relationship<'n, 's>(
         field,
         field_call,
         &annotation.target_type,
-        &annotation.target_base_type_kind,
+        annotation.target_base_type_kind,
         &target_source.details,
         session_variables,
     )?;

@@ -153,7 +153,7 @@ fn build_global_id_fields(
 
 pub(crate) fn generate_nested_selection<'s>(
     qualified_type_reference: &QualifiedTypeReference,
-    field_base_type_kind: &TypeKind,
+    field_base_type_kind: TypeKind,
     field: &normalized_ast::Field<'s, GDS>,
     data_connector: &'s resolved::data_connector::DataConnectorLink,
     type_mappings: &'s BTreeMap<Qualified<CustomTypeName>, resolved::types::TypeMapping>,

@@ -232,7 +232,7 @@ pub(crate) fn generate_selection_set_ir<'s>(
         })?;
         let nested_selection = generate_nested_selection(
             field_type,
-            field_base_type_kind,
+            *field_base_type_kind,
             field,
             data_connector,
             type_mappings,

@@ -131,11 +131,11 @@ fn extend_usage_count(usage_counts: UsagesCounts, all_usage_counts: &mut UsagesC
     }
 }
 
-pub fn count_model(model: Qualified<ModelName>, all_usage_counts: &mut UsagesCounts) {
+pub fn count_model(model: &Qualified<ModelName>, all_usage_counts: &mut UsagesCounts) {
     match all_usage_counts
         .models_used
         .iter_mut()
-        .find(|element| element.model == model)
+        .find(|element| element.model == *model)
     {
         None => {
             let model_count = ModelCount {

@@ -150,11 +150,11 @@ pub fn count_model(model: Qualified<ModelName>, all_usage_counts: &mut UsagesCou
     }
 }
 
-pub fn count_command(command: Qualified<CommandName>, all_usage_counts: &mut UsagesCounts) {
+pub fn count_command(command: &Qualified<CommandName>, all_usage_counts: &mut UsagesCounts) {
     match all_usage_counts
         .commands_used
         .iter_mut()
-        .find(|element| element.command == command)
+        .find(|element| element.command == *command)
     {
         None => {
             let command_count = CommandCount {

@@ -261,7 +261,7 @@ mod tests {
     fn test_counter_functions() {
         let mut aggregator = UsagesCounts::new();
         count_command(
-            Qualified::new("subgraph".to_string(), CommandName(identifier!("command1"))),
+            &Qualified::new("subgraph".to_string(), CommandName(identifier!("command1"))),
             &mut aggregator,
         );
         assert_eq!(

@@ -278,7 +278,7 @@ mod tests {
             }
         );
         count_command(
-            Qualified::new("subgraph".to_string(), CommandName(identifier!("command1"))),
+            &Qualified::new("subgraph".to_string(), CommandName(identifier!("command1"))),
             &mut aggregator,
         );
         assert_eq!(

@@ -295,7 +295,7 @@ mod tests {
             }
        );
         count_model(
-            Qualified::new("subgraph".to_string(), ModelName(identifier!("model1"))),
+            &Qualified::new("subgraph".to_string(), ModelName(identifier!("model1"))),
             &mut aggregator,
         );
         assert_eq!(

@@ -315,7 +315,7 @@ mod tests {
             }
         );
         count_model(
-            Qualified::new("subgraph".to_string(), ModelName(identifier!("model1"))),
+            &Qualified::new("subgraph".to_string(), ModelName(identifier!("model1"))),
             &mut aggregator,
         );
         assert_eq!(

@@ -335,7 +335,7 @@ mod tests {
             }
         );
         count_model(
-            Qualified::new("subgraph".to_string(), ModelName(identifier!("model2"))),
+            &Qualified::new("subgraph".to_string(), ModelName(identifier!("model2"))),
             &mut aggregator,
         );
         assert_eq!(

@@ -367,7 +367,7 @@ mod tests {
             }
         );
         count_command(
-            Qualified::new("subgraph".to_string(), CommandName(identifier!("command2"))),
+            &Qualified::new("subgraph".to_string(), CommandName(identifier!("command2"))),
             &mut aggregator,
         );
         assert_eq!(

@@ -22,7 +22,7 @@ pub const FUNCTION_IR_VALUE_COLUMN_NAME: &str = "__value";
 /// Executes a NDC operation
 pub async fn execute_ndc_query<'n, 's>(
     http_context: &HttpContext,
-    query: ndc_models::QueryRequest,
+    query: &ndc_models::QueryRequest,
     data_connector: &resolved::data_connector::DataConnectorLink,
     execution_span_attribute: String,
     field_span_attribute: String,

@@ -61,7 +61,7 @@ pub async fn execute_ndc_query<'n, 's>(
 
 pub(crate) async fn fetch_from_data_connector<'s>(
     http_context: &HttpContext,
-    query_request: ndc_models::QueryRequest,
+    query_request: &ndc_models::QueryRequest,
     data_connector: &resolved::data_connector::DataConnectorLink,
     project_id: Option<ProjectId>,
 ) -> Result<ndc_models::QueryResponse, error::Error> {

@@ -113,7 +113,7 @@ pub fn append_project_id_to_headers(
 /// Executes a NDC mutation
 pub(crate) async fn execute_ndc_mutation<'n, 's, 'ir>(
     http_context: &HttpContext,
-    query: ndc_models::MutationRequest,
+    query: &ndc_models::MutationRequest,
     data_connector: &resolved::data_connector::DataConnectorLink,
     selection_set: &'n normalized_ast::SelectionSet<'s, GDS>,
     execution_span_attribute: String,

@@ -192,7 +192,7 @@ pub(crate) async fn execute_ndc_mutation<'n, 's, 'ir>(
 
 pub(crate) async fn fetch_from_data_connector_mutation<'s>(
     http_context: &HttpContext,
-    query_request: ndc_models::MutationRequest,
+    query_request: &ndc_models::MutationRequest,
     data_connector: &resolved::data_connector::DataConnectorLink,
     project_id: Option<ProjectId>,
 ) -> Result<ndc_models::MutationResponse, error::Error> {

@@ -96,7 +96,7 @@ pub async fn capabilities_get(
 /// https://hasura.github.io/ndc-spec/specification/explain.html?highlight=%2Fexplain#request
 pub async fn explain_query_post(
     configuration: &Configuration,
-    query_request: ndc_models::QueryRequest,
+    query_request: &ndc_models::QueryRequest,
 ) -> Result<ndc_models::ExplainResponse, Error> {
     let tracer = tracing_util::global_tracer();
     tracer

@@ -112,7 +112,7 @@ pub async fn explain_query_post(
                     .map_err(|_| Error::InvalidBaseURL)?;
                 let req_builder = client.request(reqwest::Method::POST, uri);
 
-                execute_request(configuration, req_builder.json(&query_request)).await
+                execute_request(configuration, req_builder.json(query_request)).await
             })
         },
     )

@@ -124,7 +124,7 @@ pub async fn explain_query_post(
 /// https://hasura.github.io/ndc-spec/specification/explain.html?highlight=%2Fexplain#request-1
 pub async fn explain_mutation_post(
     configuration: &Configuration,
-    mutation_request: ndc_models::MutationRequest,
+    mutation_request: &ndc_models::MutationRequest,
 ) -> Result<ndc_models::ExplainResponse, Error> {
     let tracer = tracing_util::global_tracer();
     tracer

@@ -140,7 +140,7 @@ pub async fn explain_mutation_post(
                     .map_err(|_| Error::InvalidBaseURL)?;
                 let req_builder = client.request(reqwest::Method::POST, uri);
 
-                execute_request(configuration, req_builder.json(&mutation_request)).await
+                execute_request(configuration, req_builder.json(mutation_request)).await
             })
         },
    )

@@ -152,7 +152,7 @@ pub async fn explain_mutation_post(
 /// https://hasura.github.io/ndc-spec/specification/mutations/index.html
 pub async fn mutation_post(
     configuration: &Configuration,
-    mutation_request: ndc_models::MutationRequest,
+    mutation_request: &ndc_models::MutationRequest,
 ) -> Result<ndc_models::MutationResponse, Error> {
     let tracer = tracing_util::global_tracer();
     tracer

@@ -168,7 +168,7 @@ pub async fn mutation_post(
                     .map_err(|_| Error::InvalidBaseURL)?;
                 let req_builder = client.request(reqwest::Method::POST, uri);
 
-                execute_request(configuration, req_builder.json(&mutation_request)).await
+                execute_request(configuration, req_builder.json(mutation_request)).await
             })
         },
     )

@@ -180,7 +180,7 @@ pub async fn mutation_post(
 /// https://hasura.github.io/ndc-spec/specification/queries/index.html
 pub async fn query_post(
     configuration: &Configuration,
-    query_request: ndc_models::QueryRequest,
+    query_request: &ndc_models::QueryRequest,
 ) -> Result<ndc_models::QueryResponse, Error> {
     let tracer = tracing_util::global_tracer();
     tracer

@@ -196,7 +196,7 @@ pub async fn query_post(
                     .map_err(|_| Error::InvalidBaseURL)?;
                 let req_builder = client.request(reqwest::Method::POST, uri);
 
-                execute_request(configuration, req_builder.json(&query_request)).await
+                execute_request(configuration, req_builder.json(query_request)).await
             })
         },
     )

@@ -119,7 +119,7 @@ pub struct ExecutionNode<'s> {
     pub data_connector: &'s resolved::data_connector::DataConnectorLink,
 }
 
-#[derive(Clone, Debug, PartialEq)]
+#[derive(Clone, Copy, Debug, PartialEq)]
 pub enum ProcessResponseAs<'ir> {
     Object {
         is_nullable: bool,

@@ -606,7 +606,7 @@ async fn execute_query_field_plan<'n, 's, 'ir>(
         }
         NodeQueryPlan::NDCQueryExecution(ndc_query) => RootFieldResult::new(
             &ndc_query.process_response_as.is_nullable(),
-            resolve_ndc_query_execution(http_context, ndc_query, project_id).await,
+            resolve_ndc_query_execution(http_context, &ndc_query, project_id).await,
         ),
         NodeQueryPlan::RelayNodeSelect(optional_query) => RootFieldResult::new(
             &optional_query.as_ref().map_or(true, |ndc_query| {

@@ -827,7 +827,7 @@ fn resolve_schema_field(
 
 async fn resolve_ndc_query_execution(
     http_context: &HttpContext,
-    ndc_query: NDCQueryExecution<'_, '_>,
+    ndc_query: &NDCQueryExecution<'_, '_>,
     project_id: Option<ProjectId>,
 ) -> Result<json::Value, error::Error> {
     let NDCQueryExecution {

@@ -839,7 +839,7 @@ async fn resolve_ndc_query_execution(
     } = ndc_query;
     let mut response = ndc::execute_ndc_query(
         http_context,
-        execution_tree.root_node.query,
+        &execution_tree.root_node.query,
         execution_tree.root_node.data_connector,
         execution_span_attribute.clone(),
         field_span_attribute.clone(),

@@ -851,10 +851,9 @@ async fn resolve_ndc_query_execution(
     execute_join_locations(
         http_context,
         execution_span_attribute,
-        field_span_attribute,
         &mut response,
-        &process_response_as,
-        execution_tree.remote_executions,
+        process_response_as,
+        &execution_tree.remote_executions,
         project_id,
     )
     .await?;

@@ -879,7 +878,7 @@ async fn resolve_ndc_mutation_execution(
     } = ndc_query;
     let response = ndc::execute_ndc_mutation(
         http_context,
-        query,
+        &query,
         data_connector,
         selection_set,
         execution_span_attribute,

@@ -898,6 +897,6 @@ async fn resolve_optional_ndc_select(
 ) -> Result<json::Value, error::Error> {
     match optional_query {
         None => Ok(json::Value::Null),
-        Some(ndc_query) => resolve_ndc_query_execution(http_context, ndc_query, project_id).await,
+        Some(ndc_query) => resolve_ndc_query_execution(http_context, &ndc_query, project_id).await,
     }
 }

@@ -400,7 +400,7 @@ fn process_command_field_value(
 pub fn process_response(
     selection_set: &normalized_ast::SelectionSet<'_, GDS>,
     rows_sets: Vec<ndc_models::RowSet>,
-    process_response_as: ProcessResponseAs,
+    process_response_as: &ProcessResponseAs,
 ) -> Result<json::Value, error::Error> {
     let tracer = tracing_util::global_tracer();
     // Post process the response to add the `__typename` fields

@@ -98,25 +98,24 @@ pub(crate) mod types;
 #[async_recursion]
 pub(crate) async fn execute_join_locations<'ir>(
     http_context: &HttpContext,
-    execution_span_attribute: String,
-    field_span_attribute: String,
+    execution_span_attribute: &str,
     lhs_response: &mut Vec<ndc_models::RowSet>,
     lhs_response_type: &ProcessResponseAs,
-    join_locations: JoinLocations<(RemoteJoin<'async_recursion, 'ir>, JoinId)>,
+    join_locations: &JoinLocations<(RemoteJoin<'async_recursion, 'ir>, JoinId)>,
     project_id: Option<ProjectId>,
 ) -> Result<(), error::Error>
 where
     'ir: 'async_recursion,
 {
     let tracer = tracing_util::global_tracer();
-    for (key, location) in join_locations.locations {
+    for (key, location) in &join_locations.locations {
         // collect the join column arguments from the LHS response, also get
         // the replacement tokens
         let collect_arg_res = tracer.in_span(
             "collect_arguments",
             "Collect arguments for join".into(),
             SpanVisibility::Internal,
-            || collect::collect_arguments(lhs_response, lhs_response_type, &key, &location),
+            || collect::collect_arguments(lhs_response, lhs_response_type, key, location),
         )?;
         if let Some(CollectArgumentResult {
             arguments,

@@ -142,9 +141,9 @@ where
                 || {
                     Box::pin(execute_ndc_query(
                         http_context,
-                        join_node.target_ndc_ir,
+                        &join_node.target_ndc_ir,
                         join_node.target_data_connector,
-                        execution_span_attribute.clone(),
+                        execution_span_attribute.to_string(),
                         remote_alias.clone(),
                         project_id.clone(),
                     ))

@@ -157,12 +156,10 @@ where
             if !location.rest.locations.is_empty() {
                 execute_join_locations(
                     http_context,
-                    execution_span_attribute.clone(),
-                    // TODO: is this field span correct?
-                    field_span_attribute.clone(),
+                    execution_span_attribute,
                     &mut target_response,
                     &join_node.process_response_as,
-                    sub_tree,
+                    &sub_tree,
                     project_id.clone(),
                 )
                 .await?;

@@ -177,7 +174,7 @@ where
             let rhs_response: HashMap<Argument, ndc_models::RowSet> =
                 join_variables_.into_iter().zip(target_response).collect();
 
-            join::join_responses(&key, &remote_alias, &location, lhs_response, rhs_response)
+            join::join_responses(key, &remote_alias, location, lhs_response, &rhs_response)
         },
     )?;
 }

@@ -137,7 +137,7 @@ fn collect_argument_from_row<'s, 'ir>(
                 .to_string(),
         })?;
 
-        let rows = rows_from_row_field_value(location_kind, nested_val)?;
+        let rows = rows_from_row_field_value(*location_kind, nested_val)?;
         if let Some(mut rows) = rows {
             for (sub_key, sub_location) in &location.rest.locations {
                 for row in rows.iter_mut() {

@@ -186,7 +186,7 @@ pub(crate) fn create_argument(
 }
 
 fn rows_from_row_field_value(
-    location_kind: &LocationKind,
+    location_kind: LocationKind,
     nested_val: &ndc_models::RowFieldValue,
 ) -> Result<Option<Vec<IndexMap<String, ndc_models::RowFieldValue>>>, error::Error> {
     let rows: Option<Vec<IndexMap<String, ndc_models::RowFieldValue>>> = match location_kind {

@@ -21,7 +21,7 @@ pub(crate) fn join_responses(
     remote_alias: &str,
     location: &Location<(RemoteJoin<'_, '_>, JoinId)>,
     lhs_response: &mut [ndc_models::RowSet],
-    rhs_response: HashMap<BTreeMap<String, ValueExt>, ndc_models::RowSet>,
+    rhs_response: &HashMap<BTreeMap<String, ValueExt>, ndc_models::RowSet>,
 ) -> Result<(), error::Error> {
     for row_set in lhs_response.iter_mut() {
         if let Some(rows) = row_set.rows.as_mut() {

@@ -44,8 +44,8 @@ pub(crate) fn join_responses(
                             location,
                             &mut command_row_parsed,
                             remote_alias.to_owned(),
-                            alias.to_owned(),
-                            &rhs_response,
+                            alias,
+                            rhs_response,
                         )?;
                         *command_row = json::to_value(command_row_parsed)?;
                     }

@@ -59,8 +59,8 @@ pub(crate) fn join_responses(
                             location,
                             &mut command_row,
                             remote_alias.to_owned(),
-                            alias.to_owned(),
-                            &rhs_response,
+                            alias,
+                            rhs_response,
                         )?;
                         *x = ndc_models::RowFieldValue(json::to_value(command_row)?);
                     }

@@ -80,8 +80,8 @@ pub(crate) fn join_responses(
                         location,
                         row,
                         remote_alias.to_owned(),
-                        alias.to_owned(),
-                        &rhs_response,
+                        alias,
+                        rhs_response,
                     )?;
                 }
             };

@@ -97,7 +97,7 @@ fn follow_location_and_insert_value(
     location: &Location<(RemoteJoin<'_, '_>, JoinId)>,
     row: &mut IndexMap<String, ndc_models::RowFieldValue>,
     remote_alias: String,
-    key: String,
+    key: &str,
     rhs_response: &HashMap<Argument, ndc_models::RowSet>,
 ) -> Result<(), error::Error> {
     match &location.join_node {

@@ -109,7 +109,7 @@ fn follow_location_and_insert_value(
         }
         JoinNode::Local(location_kind) => {
             let row_field_val =
-                row.get_mut(&key)
+                row.get_mut(key)
                     .ok_or(error::InternalEngineError::InternalGeneric {
                         description: "unexpected: could not find {key} in row".into(),
                     })?;

@@ -130,7 +130,7 @@ fn follow_location_and_insert_value(
                             sub_location,
                             inner_row,
                             remote_alias.to_string(),
-                            sub_key.to_string(),
+                            sub_key,
                             rhs_response,
                         )?;
                     }

@@ -152,7 +152,7 @@ fn follow_location_and_insert_value(
                             sub_location,
                             inner_row,
                             remote_alias.to_string(),
-                            sub_key.to_string(),
+                            sub_key,
                             rhs_response,
                         )?
                     }

@@ -171,7 +171,7 @@ fn follow_location_and_insert_value(
                             sub_location,
                             &mut inner_row,
                             remote_alias.to_string(),
-                            sub_key.to_string(),
+                            sub_key,
                             rhs_response,
                         )?
                     }

@@ -32,7 +32,7 @@ pub(crate) fn generate_select_many_arguments(
     // insert limit argument
     if let Some(limit_field) = &model.graphql_api.limit_field {
         let limit_argument = generate_int_input_argument(
-            limit_field.field_name.to_string(),
+            limit_field.field_name.as_str(),
             Annotation::Input(types::InputAnnotation::Model(
                 ModelInputAnnotation::ModelLimitArgument,
            )),

@@ -46,7 +46,7 @@ pub(crate) fn generate_select_many_arguments(
     // insert offset argument
     if let Some(offset_field) = &model.graphql_api.offset_field {
         let offset_argument = generate_int_input_argument(
-            offset_field.field_name.to_string(),
+            offset_field.field_name.as_str(),
             Annotation::Input(types::InputAnnotation::Model(
                 ModelInputAnnotation::ModelOffsetArgument,
             )),

@@ -153,10 +153,10 @@ pub(crate) fn select_many_field(
 
 /// Generates the input field for the arguments which are of type int.
 pub(crate) fn generate_int_input_argument(
-    name: String,
+    name: &str,
     annotation: Annotation,
 ) -> Result<gql_schema::InputField<GDS>, crate::schema::Error> {
-    let input_field_name = resolved::types::mk_name(&name)?;
+    let input_field_name = resolved::types::mk_name(name)?;
     Ok(gql_schema::InputField::new(
         input_field_name,
         None,

@@ -151,7 +151,7 @@ pub enum ApolloFederationRootFields {
     Service,
 }
 
-#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, Display, Copy)]
+#[derive(Serialize, Deserialize, Clone, Copy, Debug, PartialEq, Eq, Display)]
 pub enum TypeKind {
     Scalar,
     Object,

@@ -1,5 +1,5 @@
-pub mod async_map;
 pub mod json_ext;
 
 use serde::{de::DeserializeOwned, ser::SerializeMap, Deserialize, Serialize};
 use std::{collections::HashMap, hash::Hash};
@@ -1,31 +0,0 @@
-use axum::async_trait;
-use futures::Future;
-use std::pin::Pin;
-
-#[async_trait]
-pub trait AsyncMap<T, U, F>
-where
-    F: FnOnce(T) -> Pin<Box<dyn Future<Output = U> + Send>> + Send,
-{
-    type Output;
-    async fn async_map(self, map: F) -> Self::Output;
-}
-
-#[async_trait]
-impl<T, U, F> AsyncMap<T, U, F> for std::option::Option<T>
-where
-    T: Send,
-    U: Send,
-    F: 'static + FnOnce(T) -> Pin<Box<dyn Future<Output = U> + Send>> + Send,
-{
-    type Output = std::option::Option<U>;
-    async fn async_map(self, map: F) -> Self::Output {
-        match self {
-            Some(t) => {
-                let u = map(t).await;
-                Some(u)
-            }
-            None => None,
-        }
-    }
-}
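With the explain call sites rewritten as plain `if`/`else` above, this `Option`-mapping helper has no remaining users and can be deleted. A minimal before/after sketch of the replacement, using stand-in names (not the engine's real API) and assuming the `futures` crate for the executor:

```rust
// Stand-in for a remote explain call; not the engine's real function.
async fn explain() -> Result<String, String> {
    Ok("plan".to_string())
}

async fn maybe_explain(supported: Option<()>) -> Result<Option<String>, String> {
    // Old shape: supported.async_map(|_| Box::pin(async { ... })).await.transpose()
    // New shape: ordinary control flow; no boxed future, no closure capturing
    // across an await point.
    if supported.is_some() {
        explain().await.map(Some)
    } else {
        Ok(None)
    }
}

fn main() {
    let result = futures::executor::block_on(maybe_explain(Some(())));
    assert_eq!(result, Ok(Some("plan".to_string())));
}
```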
@@ -427,13 +427,13 @@ fn in_curly_braces(strings: Vec<String>) -> String {
         "{{\n{}\n}}",
         strings
             .into_iter()
-            .map(with_indent)
+            .map(|s| with_indent(&s))
             .collect::<Vec<String>>()
             .join("\n")
     )
 }
 
-fn with_indent(sdl: String) -> String {
+fn with_indent(sdl: &str) -> String {
     sdl.lines()
         .map(|l| format!(" {}", l))
         .collect::<Vec<String>>()

@@ -596,18 +596,13 @@ fn collect_accessible_types<S: schema::SchemaContext>(
     // insert query root always to accessible types
     // then collect other accessible types
     accessible_types.insert(schema.query_type.clone());
-    collect_accessible_types_(
-        namespace,
-        schema,
-        schema.query_type.clone(),
-        accessible_types,
-    );
+    collect_accessible_types_(namespace, schema, &schema.query_type, accessible_types);
 
     // insert mutation root always to accessible types if it exists in schema
     // and collect related accessible types
     if let Some(mutation_type) = &schema.mutation_type {
         accessible_types.insert(mutation_type.clone());
-        collect_accessible_types_(namespace, schema, mutation_type.clone(), accessible_types);
+        collect_accessible_types_(namespace, schema, mutation_type, accessible_types);
     }
 }
 

@@ -615,10 +610,10 @@ fn collect_accessible_types<S: schema::SchemaContext>(
 fn collect_accessible_types_<S: schema::SchemaContext>(
     namespace: &S::Namespace,
     schema: &schema::Schema<S>,
-    current_type_name: ast::TypeName,
+    current_type_name: &ast::TypeName,
     accessible_types: &mut HashSet<TypeName>,
 ) {
-    let current_type = schema.types.get(&current_type_name);
+    let current_type = schema.types.get(current_type_name);
     match current_type {
         Some(schema::TypeInfo::Object(object)) => {
             for namespaced_fields in object.fields.values() {

@@ -631,7 +626,7 @@ fn collect_accessible_types_<S: schema::SchemaContext>(
                     collect_accessible_types_(
                         namespace,
                         schema,
-                        input_field_type_name.clone(),
+                        input_field_type_name,
                         accessible_types,
                     )
                 }

@@ -643,7 +638,7 @@ fn collect_accessible_types_<S: schema::SchemaContext>(
                 collect_accessible_types_(
                     namespace,
                     schema,
-                    field_type_name.clone(),
+                    field_type_name,
                     accessible_types,
                 );
             }

@@ -659,7 +654,7 @@ fn collect_accessible_types_<S: schema::SchemaContext>(
                 collect_accessible_types_(
                     namespace,
                     schema,
-                    input_field_type_name.clone(),
+                    input_field_type_name,
                     accessible_types,
                 )
             }

@@ -23,7 +23,7 @@ pub mod string;
 //     0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
 // ];
 
-#[derive(Debug, PartialEq, Clone)]
+#[derive(Debug, Clone, Copy, PartialEq)]
 pub enum Punctuation {
     // !
     Bang,

@@ -1,4 +1,3 @@
-use core::slice::Iter;
 use std::cmp::min;
 use std::str::from_utf8_unchecked;
 use thiserror::Error;

@@ -75,8 +74,8 @@ fn num_bytes_in_char(first_byte: u8) -> usize {
     }
 }
 
-fn is_whitespace(b: &u8) -> bool {
-    *b == b' ' || *b == b'\t'
+fn is_whitespace(b: u8) -> bool {
+    b == b' ' || b == b'\t'
 }
 
 struct Line {

@@ -94,8 +93,8 @@ impl Line {
         self.start = min(self.start + count, self.end);
     }
 
-    fn iter<'a>(&self, bytes: &'a [u8]) -> Iter<'a, u8> {
-        bytes[self.start..self.end].iter()
+    fn iter<'bytes>(&self, bytes: &'bytes [u8]) -> impl Iterator<Item = u8> + 'bytes {
+        bytes[self.start..self.end].iter().copied()
     }
 
     fn append_to(&self, bytes: &[u8], s: &mut String) {

@@ -179,7 +178,7 @@ fn parse_block_string(bytes: &[u8]) -> Result<(String, Consumed, usize), (Error,
         .skip(1)
         // Don't consider whitespace only lines
         .filter_map(|line| {
-            let indent = line.iter(bytes).take_while(|b| is_whitespace(b)).count();
+            let indent = line.iter(bytes).take_while(|b| is_whitespace(*b)).count();
             (indent < line.len()).then_some(indent)
         })
         // The minimum of all indents would be the common indent

@@ -427,7 +427,7 @@ impl<'a> Parser<'a> {
     {
         let start = self.parse_punctuation(start_token)?;
         let mut items = vec![];
-        let end_token_ = &lexer::Token::Punctuation(end_token.clone());
+        let end_token_ = &lexer::Token::Punctuation(end_token);
         while !self.is_next_token(end_token_) {
             items.push(parse(self)?);
         }

@@ -451,7 +451,7 @@ impl<'a> Parser<'a> {
     where
         F: Fn(&mut Self) -> Result<T>,
     {
-        if self.is_next_token(&lexer::Token::Punctuation(start_token.clone())) {
+        if self.is_next_token(&lexer::Token::Punctuation(start_token)) {
             Ok(Some(self.parse_delimited_list_helper(
                 start_token,
                 end_token,

@@ -105,7 +105,7 @@ pub fn check_fragment_cycles<'q>(
             executable::Selection::FragmentSpread(spread) => {
                 let fragment_name = &spread.fragment_name.item;
                 if fragment_path.contains(fragment_name) {
-                    let mut path = fragment_path.iter().cloned().cloned().collect::<Vec<_>>();
+                    let mut path = fragment_path.iter().copied().cloned().collect::<Vec<_>>();
                     path.push(fragment_name.clone());
                     return Err(Error::CycleDetected(path));
                 }

@@ -230,7 +230,7 @@ where
             directives,
         };
         if cannonical_field.reachable {
-            field_calls.insert(reachability.iter().cloned().cloned().collect(), field_call);
+            field_calls.insert(reachability.iter().copied().cloned().collect(), field_call);
         }
         alias_selection_sets.push((reachability, selection_sets));
     }

@@ -10,8 +10,8 @@ use std::{
 use expect_test::expect_file;
 
 #[cfg(test)]
-fn test_parser_for_schema(schema_path: PathBuf) -> Result<(), io::Error> {
-    let schema = fs::read_to_string(schema_path.as_path())?;
+fn test_parser_for_schema(schema_path: &Path) -> Result<(), io::Error> {
+    let schema = fs::read_to_string(schema_path)?;
     let expected_ast_path = schema_path.with_extension("ast.txt");
     match fs::read_to_string(expected_ast_path.as_path()) {
         Err(io_error) => {

@@ -62,7 +62,7 @@ fn test_schema_parser() -> Result<(), io::Error> {
             dir_entry.path().extension().map(|e| e.to_str()),
             Some(Some("graphql"))
         ) {
-            test_parser_for_schema(dir_entry.path())?;
+            test_parser_for_schema(&dir_entry.path())?;
         }
     }
     Ok(())

@@ -33,19 +33,19 @@ impl EnumTagType {
     }
 }
 
-pub fn impl_opendd_enum(impl_style: EnumImplStyle, variants: Vec<EnumVariant<'_>>) -> TraitImpls {
+pub fn impl_opendd_enum(impl_style: EnumImplStyle, variants: &[EnumVariant<'_>]) -> TraitImpls {
     let (tag, deserialize_exp, json_schema_expr) = match impl_style {
         EnumImplStyle::UntaggedWithKind => (
             "kind".to_string(),
-            impl_deserialize_as_untagged(&variants),
-            impl_json_schema_untagged(&variants),
+            impl_deserialize_as_untagged(variants),
+            impl_json_schema_untagged(variants),
         ),
         EnumImplStyle::Tagged(tag_kind) => {
             let tag_type = EnumTagType::new(&tag_kind);
             (
                 tag_type.generate_tag(),
-                impl_deserialize_as_tagged(&variants, &tag_type),
-                impl_json_schema_tagged(&variants, &tag_type),
+                impl_deserialize_as_tagged(variants, &tag_type),
+                impl_json_schema_tagged(variants, &tag_type),
             )
         }
     };

@@ -228,8 +228,8 @@ fn impl_json_schema_untagged(variants: &[EnumVariant<'_>]) -> proc_macro2::Token
                 open_dds::traits::gen_subschema_for::<#ty>(gen)
             }
         })
-        .collect();
-    helpers::variant_subschemas(false, schemas)
+        .collect::<Vec<_>>();
+    helpers::variant_subschemas(false, &schemas)
 }
 
 fn impl_json_schema_tagged(

@@ -303,9 +303,9 @@ fn impl_json_schema_tagged(
                     }}
 
                 })
-                .collect();
+                .collect::<Vec<_>>();
 
-            helpers::variant_subschemas(unique_names.len() == count, variant_schemas)
+            helpers::variant_subschemas(unique_names.len() == count, &variant_schemas)
         }
         EnumTagType::Adjacent { tag, content } => {
             let mut unique_names = std::collections::HashSet::new();

@@ -317,7 +317,7 @@ fn impl_json_schema_tagged(
                 count += 1;
 
                 let name = &variant.renamed_variant;
-                let schema = helpers::schema_object(quote! {
+                let schema = helpers::schema_object(&quote! {
                     instance_type: Some(schemars::schema::InstanceType::String.into()),
                     enum_values: Some(vec![#name.into()]),
                 });

@@ -325,7 +325,7 @@ fn impl_json_schema_tagged(
                 let content_schema = quote! {
                     open_dds::traits::gen_subschema_for::<#ty>(gen)
                 };
-                helpers::schema_object(quote! {
+                helpers::schema_object(&quote! {
                     instance_type: Some(schemars::schema::InstanceType::Object.into()),
                     object: Some(Box::new(schemars::schema::ObjectValidation {
                         properties: {

@@ -345,9 +345,9 @@ fn impl_json_schema_tagged(
                     })),
                 })
             })
-            .collect();
+            .collect::<Vec<_>>();
 
-            helpers::variant_subschemas(unique_names.len() == count, variant_schemas)
+            helpers::variant_subschemas(unique_names.len() == count, &variant_schemas)
         }
     }
 }

@@ -84,7 +84,7 @@ fn none_if_empty(s: String) -> Option<String> {
 }
 
 pub fn apply_schema_metadata(
-    schema_expr: proc_macro2::TokenStream,
+    schema_expr: &proc_macro2::TokenStream,
     json_schema_metadata: JsonSchemaMetadata,
 ) -> proc_macro2::TokenStream {
     let title = json_schema_metadata.title;

@@ -117,7 +117,7 @@ pub fn apply_schema_metadata(
     }
 }
 
-pub fn schema_object(properties: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
+pub fn schema_object(properties: &proc_macro2::TokenStream) -> proc_macro2::TokenStream {
     quote! {
         schemars::schema::Schema::Object(
             schemars::schema::SchemaObject {

@@ -131,17 +131,17 @@ pub fn schema_object(properties: proc_macro2::TokenStream) -> proc_macro2::Token
 /// be done for most tagging regimes by checking that all tag names are unique.
 pub fn variant_subschemas(
     unique: bool,
-    schemas: Vec<proc_macro2::TokenStream>,
+    schemas: &[proc_macro2::TokenStream],
 ) -> proc_macro2::TokenStream {
     if unique {
-        schema_object(quote! {
+        schema_object(&quote! {
             subschemas: Some(Box::new(schemars::schema::SubschemaValidation {
                 one_of: Some(vec![#(#schemas),*]),
                 ..Default::default()
             })),
         })
     } else {
-        schema_object(quote! {
+        schema_object(&quote! {
             subschemas: Some(Box::new(schemars::schema::SubschemaValidation {
                 any_of: Some(vec![#(#schemas),*]),
                 ..Default::default()

@@ -12,14 +12,14 @@ use crate::container::*;
 #[proc_macro_derive(OpenDd, attributes(opendd))]
 pub fn derive(input_tok: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input_tok as DeriveInput);
-    impl_opendd(input)
+    impl_opendd(&input)
         .unwrap_or_else(syn::Error::into_compile_error)
         .into()
 }
 
-fn impl_opendd(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
+fn impl_opendd(input: &DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
     let name = &input.ident;
-    let cont = Container::from_derive_input(&input)?;
+    let cont = Container::from_derive_input(input)?;
     let TraitImpls {
         deserialize: impl_deserialize,
         json_schema,

@@ -30,13 +30,13 @@ fn impl_opendd(input: DeriveInput) -> syn::Result<proc_macro2::TokenStream> {
             })
         }
         Data::Enum(EnumData::Impl(impl_style, enum_variants)) => {
-            enum_derive::impl_opendd_enum(impl_style, enum_variants)
+            enum_derive::impl_opendd_enum(impl_style, &enum_variants)
         }
-        Data::Struct(struct_data) => struct_derive::impl_opendd_struct(name, struct_data),
+        Data::Struct(struct_data) => struct_derive::impl_opendd_struct(name, &struct_data),
     };
     let json_schema_metadata = cont.json_schema_metadata;
     let schema_name = &json_schema_metadata.schema_name.to_string();
-    let impl_json_schema = helpers::apply_schema_metadata(json_schema, json_schema_metadata);
+    let impl_json_schema = helpers::apply_schema_metadata(&json_schema, json_schema_metadata);
     let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
     Ok(quote! {
         impl #impl_generics open_dds::traits::OpenDd for #name #ty_generics #where_clause {

@@ -3,7 +3,7 @@ use quote::quote;
 use crate::container::*;
 use crate::helpers;
 
-pub fn impl_opendd_struct(name: &syn::Ident, data: StructData<'_>) -> TraitImpls {
+pub fn impl_opendd_struct(name: &syn::Ident, data: &StructData<'_>) -> TraitImpls {
     let (impl_deserialize, json_schema_expr) = match &data {
         StructData::Newtype(field) => (
             quote! {

@@ -185,7 +185,7 @@ fn impl_json_schema_named_fields(fields: &[NamedField<'_>]) -> proc_macro2::Toke
         let mut required = schemars::Set::new();
     };
 
-    let schema_object = helpers::schema_object(quote! {
+    let schema_object = helpers::schema_object(&quote! {
         instance_type: Some(schemars::schema::InstanceType::Object.into()),
         object: Some(Box::new(schemars::schema::ObjectValidation{
             properties,

@@ -1,15 +1,19 @@
-use crate::traceable::{ErrorVisibility, Traceable, TraceableError};
-use http::HeaderMap;
-use opentelemetry::{
-    global::{self, BoxedTracer},
-    trace::{get_active_span, FutureExt, SpanRef, TraceContextExt, Tracer as OtelTracer},
-    Context, Key,
-};
-use opentelemetry_http::HeaderExtractor;
 use std::borrow::Cow;
-use std::{collections::HashMap, future::Future, pin::Pin};
+use std::collections::HashMap;
+use std::future::Future;
+use std::pin::Pin;
 
-#[derive(derive_more::Display)]
+use http::HeaderMap;
+use opentelemetry::global::{self, BoxedTracer};
+use opentelemetry::trace::{
+    get_active_span, FutureExt, SpanRef, TraceContextExt, Tracer as OtelTracer,
+};
+use opentelemetry::{Context, Key};
+use opentelemetry_http::HeaderExtractor;
+
+use crate::traceable::{ErrorVisibility, Traceable, TraceableError};
+
+#[derive(Clone, Copy, derive_more::Display)]
 pub enum SpanVisibility {
     #[display(fmt = "internal")]
     Internal,

@@ -17,6 +21,7 @@ pub enum SpanVisibility {
     User,
 }
 
+#[derive(Clone, Copy)]
 pub enum AttributeVisibility {
     Default,
     Internal,