mirror of
https://github.com/hasura/graphql-engine.git
synced 2024-12-14 17:02:49 +03:00
Idiomatic iteration patterns. (#632)
Fix some warnings flagged by Clippy. 1. Elide `.into_iter()` where it's unnecessary. 2. Favor `&` over `.iter()`. 3. Use `.values()` on maps instead of discarding keys by destructuring. 4. Avoid `::from_iter(…)` in favor of `.collect()`. I also replaced a call to `.cloned()` with `.copied()`. V3_GIT_ORIGIN_REV_ID: 7d39665b0cd04f5bae9405c0ff5f044f57433f32
This commit is contained in:
parent
85a2eb7a51
commit
79074bef84
@ -32,9 +32,6 @@ wildcard_imports = "allow"
|
||||
# disable these for now, but we should probably fix them
|
||||
cast_precision_loss = "allow"
|
||||
default_trait_access = "allow"
|
||||
explicit_into_iter_loop = "allow"
|
||||
explicit_iter_loop = "allow"
|
||||
from_iter_instead_of_collect = "allow"
|
||||
if_not_else = "allow"
|
||||
ignored_unit_patterns = "allow"
|
||||
implicit_clone = "allow"
|
||||
|
@ -10,7 +10,7 @@ fn build_allowed_roles(
|
||||
hasura_claims: &HasuraClaims,
|
||||
) -> Result<HashMap<Role, auth_base::RoleAuthorization>, Error> {
|
||||
let mut allowed_roles = HashMap::new();
|
||||
for role in hasura_claims.allowed_roles.iter() {
|
||||
for role in &hasura_claims.allowed_roles {
|
||||
let role_authorization = auth_base::RoleAuthorization {
|
||||
role: role.clone(),
|
||||
// Note: The same `custom_claims` is being cloned
|
||||
|
@ -584,7 +584,7 @@ pub(crate) async fn decode_and_parse_hasura_claims(
|
||||
let mut custom_claims = HashMap::new();
|
||||
|
||||
claims_mappings.custom_claims.map(|custom_claim_mappings| {
|
||||
for (claim_name, claims_mapping_entry) in custom_claim_mappings.into_iter() {
|
||||
for (claim_name, claims_mapping_entry) in custom_claim_mappings {
|
||||
let claim_value = get_claims_mapping_entry_value(
|
||||
claim_name.to_string(),
|
||||
claims_mapping_entry,
|
||||
|
@ -170,7 +170,7 @@ async fn make_auth_hook_request(
|
||||
let http_request_builder = match auth_hook_config.method {
|
||||
AuthHookMethod::Get => {
|
||||
let mut auth_hook_headers = tracing_util::get_trace_headers();
|
||||
for (header_name, header_value) in client_headers.iter() {
|
||||
for (header_name, header_value) in client_headers {
|
||||
if !COMMON_CLIENT_HEADERS_TO_IGNORE.contains(header_name.as_str()) {
|
||||
auth_hook_headers.insert(header_name, header_value.clone());
|
||||
}
|
||||
@ -182,7 +182,7 @@ async fn make_auth_hook_request(
|
||||
}
|
||||
AuthHookMethod::Post => {
|
||||
let mut auth_hook_headers = HashMap::new();
|
||||
for (header_name, header_value) in client_headers.iter() {
|
||||
for (header_name, header_value) in client_headers {
|
||||
auth_hook_headers.insert(
|
||||
header_name.to_string(),
|
||||
header_value
|
||||
@ -231,7 +231,7 @@ async fn make_auth_hook_request(
|
||||
let auth_hook_response: HashMap<String, String> =
|
||||
response.json().await.map_err(InternalError::ReqwestError)?;
|
||||
let mut session_variables = HashMap::new();
|
||||
for (k, v) in auth_hook_response.iter() {
|
||||
for (k, v) in &auth_hook_response {
|
||||
match SessionVariable::from_str(k) {
|
||||
Ok(session_variable) => {
|
||||
session_variables
|
||||
|
@ -59,7 +59,7 @@ pub(crate) fn rows(
|
||||
|
||||
let mut actors_by_movie = vec![];
|
||||
|
||||
for (_id, actor) in state.actors.iter() {
|
||||
for actor in state.actors.values() {
|
||||
let actor_movie_id_int = get_actor_movie_id(actor)?;
|
||||
if actor_movie_id_int == movie_id_int {
|
||||
actors_by_movie.push(actor.clone())
|
||||
|
@ -29,7 +29,7 @@ pub(crate) fn rows(
|
||||
|
||||
let mut actor_names_by_movie = vec![];
|
||||
|
||||
for (_id, actor) in state.actors.iter() {
|
||||
for actor in state.actors.values() {
|
||||
let actor_movie_id = actor.get("movie_id").ok_or((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(ndc_models::ErrorResponse {
|
||||
|
@ -52,7 +52,7 @@ pub(crate) fn rows(
|
||||
|
||||
// this is very limited, we can only compare id columns with "_eq"
|
||||
if column == "id" && operator == "_eq" {
|
||||
for (_id, actor) in state.actors.iter() {
|
||||
for actor in state.actors.values() {
|
||||
let actor_value = serde_json::to_value(actor).map_err(|_| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
|
@ -61,7 +61,7 @@ pub(crate) fn rows(
|
||||
|
||||
let mut actors_by_movie = vec![];
|
||||
|
||||
for (_id, actor) in state.actors.iter() {
|
||||
for actor in state.actors.values() {
|
||||
let actor_movie_id_int = get_actor_movie_id(actor)?;
|
||||
|
||||
if actor_movie_id_int == movie_id_int {
|
||||
|
@ -42,7 +42,7 @@ pub(crate) fn rows(
|
||||
))?;
|
||||
let mut actors_by_movie_id_bounds = vec![];
|
||||
|
||||
for (_id, actor) in state.actors.iter() {
|
||||
for actor in state.actors.values() {
|
||||
let movie_id = actor.get("movie_id").ok_or((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
Json(ndc_models::ErrorResponse {
|
||||
|
@ -23,7 +23,7 @@ pub(crate) fn function_info() -> ndc_models::FunctionInfo {
|
||||
|
||||
pub(crate) fn rows(state: &AppState) -> Result<Vec<Row>> {
|
||||
let mut actors = vec![];
|
||||
for (_id, actor) in state.actors.iter() {
|
||||
for actor in state.actors.values() {
|
||||
let actor_value = serde_json::to_value(actor).map_err(|_| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
|
@ -23,7 +23,7 @@ pub(crate) fn function_info() -> ndc_models::FunctionInfo {
|
||||
|
||||
pub(crate) fn rows(state: &AppState) -> Result<Vec<Row>> {
|
||||
let mut movies = vec![];
|
||||
for (_id, movie) in state.movies.iter() {
|
||||
for movie in state.movies.values() {
|
||||
let movie_value = serde_json::to_value(movie).map_err(|_| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
|
@ -13,7 +13,7 @@ pub fn execute_mutation_request(
|
||||
) -> Result<ndc_models::MutationResponse> {
|
||||
let mut operation_results = vec![];
|
||||
|
||||
for operation in request.operations.iter() {
|
||||
for operation in &request.operations {
|
||||
let operation_result =
|
||||
execute_mutation_operation(state, &request.collection_relationships, operation)?;
|
||||
operation_results.push(operation_result);
|
||||
|
@ -82,8 +82,10 @@ pub(crate) fn execute(
|
||||
let old_row = current_state.get(&id_int);
|
||||
match &old_row {
|
||||
Some(actor_obj) => {
|
||||
let mut new_row =
|
||||
BTreeMap::from_iter(actor_obj.iter().map(|(k, v)| (k.clone(), v.clone())));
|
||||
let mut new_row = actor_obj
|
||||
.iter()
|
||||
.map(|(k, v)| (k.clone(), v.clone()))
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
new_row.insert("name".into(), name.clone());
|
||||
state.actors.insert(id_int, new_row);
|
||||
let output_row = state.actors.get(&id_int);
|
||||
|
@ -60,8 +60,7 @@ pub(crate) fn execute(
|
||||
))?;
|
||||
let actor_name_uppercase = actor_name_str.to_uppercase();
|
||||
let actor_name_uppercase_value = serde_json::Value::String(actor_name_uppercase);
|
||||
let mut new_row =
|
||||
BTreeMap::from_iter(actor_obj.iter().map(|(k, v)| (k.clone(), v.clone())));
|
||||
let mut new_row = (*actor_obj).clone();
|
||||
new_row.insert("name".into(), actor_name_uppercase_value.clone());
|
||||
state.actors.insert(id_int, new_row);
|
||||
let old_row = state.actors.get(&id_int);
|
||||
|
@ -15,7 +15,7 @@ pub(crate) fn execute(
|
||||
) -> Result<serde_json::Value> {
|
||||
let mut actors_list = vec![];
|
||||
let current_state = state.actors.clone();
|
||||
for (actor_id, actor) in current_state.iter() {
|
||||
for (actor_id, actor) in &current_state {
|
||||
let id_int = *actor_id;
|
||||
let actor_name = actor.get("name").ok_or((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
@ -34,8 +34,7 @@ pub(crate) fn execute(
|
||||
let actor_name_uppercase = actor_name_str.to_uppercase();
|
||||
let actor_name_uppercase_value = serde_json::Value::String(actor_name_uppercase);
|
||||
|
||||
let old_row = actor;
|
||||
let mut new_row = BTreeMap::from_iter(old_row.iter().map(|(k, v)| (k.clone(), v.clone())));
|
||||
let mut new_row = actor.clone();
|
||||
new_row.insert("name".into(), actor_name_uppercase_value.clone());
|
||||
state.actors.insert(id_int, new_row.clone());
|
||||
let actor_json = serde_json::to_value(new_row).map_err(|_| {
|
||||
|
@ -1,5 +1,3 @@
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
use axum::{http::StatusCode, Json};
|
||||
use ndc_models;
|
||||
|
||||
@ -8,7 +6,7 @@ use crate::{query::Result, state::AppState};
|
||||
pub(crate) fn execute(state: &mut AppState) -> Result<serde_json::Value> {
|
||||
let mut actors_list = vec![];
|
||||
let current_state = state.actors.clone();
|
||||
for (actor_id, actor) in current_state.iter() {
|
||||
for (actor_id, actor) in &current_state {
|
||||
let id_int = *actor_id;
|
||||
let actor_name = actor.get("name").ok_or((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
@ -27,8 +25,7 @@ pub(crate) fn execute(state: &mut AppState) -> Result<serde_json::Value> {
|
||||
let actor_name_uppercase = actor_name_str.to_uppercase();
|
||||
let actor_name_uppercase_value = serde_json::Value::String(actor_name_uppercase);
|
||||
|
||||
let old_row = actor;
|
||||
let mut new_row = BTreeMap::from_iter(old_row.iter().map(|(k, v)| (k.clone(), v.clone())));
|
||||
let mut new_row = actor.clone();
|
||||
new_row.insert("name".into(), actor_name_uppercase_value.clone());
|
||||
state.actors.insert(id_int, new_row);
|
||||
let output_row = state.actors.get(actor_id);
|
||||
|
@ -75,7 +75,10 @@ pub(crate) fn execute(
|
||||
}),
|
||||
)
|
||||
})?;
|
||||
let new_row = BTreeMap::from_iter(actor_obj.iter().map(|(k, v)| (k.clone(), v.clone())));
|
||||
let new_row = actor_obj
|
||||
.iter()
|
||||
.map(|(k, v)| (k.clone(), v.clone()))
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
let old_row = state.actors.insert(id_int, new_row);
|
||||
old_row.map_or(Ok(serde_json::Value::Null), |old_row| {
|
||||
let old_row_value = serde_json::to_value(old_row).map_err(|_| {
|
||||
|
@ -52,7 +52,7 @@ fn execute_query_with_variables(
|
||||
) -> Result<ndc_models::RowSet> {
|
||||
let mut argument_values = BTreeMap::new();
|
||||
|
||||
for (argument_name, argument_value) in arguments.iter() {
|
||||
for (argument_name, argument_value) in arguments {
|
||||
if argument_values
|
||||
.insert(
|
||||
argument_name.clone(),
|
||||
@ -144,7 +144,7 @@ fn execute_query(
|
||||
None => Ok(sorted),
|
||||
Some(expr) => {
|
||||
let mut filtered: Vec<Row> = vec![];
|
||||
for item in sorted.into_iter() {
|
||||
for item in sorted {
|
||||
let root = match root {
|
||||
Root::CurrentRow => &item,
|
||||
Root::ExplicitRow(root) => root,
|
||||
@ -171,7 +171,7 @@ fn execute_query(
|
||||
.as_ref()
|
||||
.map(|aggregates| {
|
||||
let mut row: IndexMap<String, serde_json::Value> = IndexMap::new();
|
||||
for (aggregate_name, aggregate) in aggregates.iter() {
|
||||
for (aggregate_name, aggregate) in aggregates {
|
||||
row.insert(
|
||||
aggregate_name.clone(),
|
||||
eval_aggregate(aggregate, &paginated)?,
|
||||
@ -186,7 +186,7 @@ fn execute_query(
|
||||
.as_ref()
|
||||
.map(|fields| {
|
||||
let mut rows: Vec<IndexMap<String, ndc_models::RowFieldValue>> = vec![];
|
||||
for item in paginated.iter() {
|
||||
for item in &paginated {
|
||||
let row = eval_row(fields, collection_relationships, variables, state, item)?;
|
||||
rows.push(row)
|
||||
}
|
||||
@ -205,7 +205,7 @@ fn eval_row(
|
||||
item: &BTreeMap<String, Value>,
|
||||
) -> Result<IndexMap<String, ndc_models::RowFieldValue>> {
|
||||
let mut row = IndexMap::new();
|
||||
for (field_name, field) in fields.iter() {
|
||||
for (field_name, field) in fields {
|
||||
row.insert(
|
||||
field_name.clone(),
|
||||
eval_field(collection_relationships, variables, state, field, item)?,
|
||||
@ -331,9 +331,9 @@ fn sort(
|
||||
None => Ok(collection),
|
||||
Some(order_by) => {
|
||||
let mut copy = vec![];
|
||||
for item_to_insert in collection.into_iter() {
|
||||
for item_to_insert in collection {
|
||||
let mut index = 0;
|
||||
for other in copy.iter() {
|
||||
for other in &copy {
|
||||
if let Ordering::Greater = eval_order_by(
|
||||
collection_relationships,
|
||||
variables,
|
||||
@ -376,7 +376,7 @@ fn eval_order_by(
|
||||
) -> Result<Ordering> {
|
||||
let mut result = Ordering::Equal;
|
||||
|
||||
for element in order_by.elements.iter() {
|
||||
for element in &order_by.elements {
|
||||
let v1 = eval_order_by_element(collection_relationships, variables, state, element, t1)?;
|
||||
let v2 = eval_order_by_element(collection_relationships, variables, state, element, t2)?;
|
||||
let x = match element.order_direction {
|
||||
@ -519,7 +519,7 @@ fn eval_path(
|
||||
) -> Result<Vec<Row>> {
|
||||
let mut result: Vec<Row> = vec![item.clone()];
|
||||
|
||||
for path_element in path.iter() {
|
||||
for path_element in path {
|
||||
let relationship_name = path_element.relationship.as_str();
|
||||
let relationship = collection_relationships.get(relationship_name).ok_or((
|
||||
StatusCode::BAD_REQUEST,
|
||||
@ -570,10 +570,10 @@ fn eval_path_element(
|
||||
// should consist of all object relationships, and possibly terminated by a
|
||||
// single array relationship, so there should be no double counting.
|
||||
|
||||
for src_row in source.iter() {
|
||||
for src_row in source {
|
||||
let mut all_arguments = BTreeMap::new();
|
||||
|
||||
for (argument_name, argument_value) in relationship.arguments.iter() {
|
||||
for (argument_name, argument_value) in &relationship.arguments {
|
||||
if all_arguments
|
||||
.insert(
|
||||
argument_name.clone(),
|
||||
@ -591,7 +591,7 @@ fn eval_path_element(
|
||||
}
|
||||
}
|
||||
|
||||
for (argument_name, argument_value) in arguments.iter() {
|
||||
for (argument_name, argument_value) in arguments {
|
||||
if all_arguments
|
||||
.insert(
|
||||
argument_name.clone(),
|
||||
@ -615,7 +615,7 @@ fn eval_path_element(
|
||||
state,
|
||||
)?;
|
||||
|
||||
for tgt_row in target.iter() {
|
||||
for tgt_row in &target {
|
||||
if let Some(predicate) = predicate {
|
||||
if eval_column_mapping(relationship, src_row, tgt_row)?
|
||||
&& eval_expression(
|
||||
@ -694,7 +694,7 @@ fn eval_expression(
|
||||
) -> Result<bool> {
|
||||
match expr {
|
||||
ndc_models::Expression::And { expressions } => {
|
||||
for expr in expressions.iter() {
|
||||
for expr in expressions {
|
||||
if !eval_expression(collection_relationships, variables, state, expr, root, item)? {
|
||||
return Ok(false);
|
||||
}
|
||||
@ -702,7 +702,7 @@ fn eval_expression(
|
||||
Ok(true)
|
||||
}
|
||||
ndc_models::Expression::Or { expressions } => {
|
||||
for expr in expressions.iter() {
|
||||
for expr in expressions {
|
||||
if eval_expression(collection_relationships, variables, state, expr, root, item)? {
|
||||
return Ok(true);
|
||||
}
|
||||
@ -757,8 +757,8 @@ fn eval_expression(
|
||||
root,
|
||||
item,
|
||||
)?;
|
||||
for left_val in left_vals.iter() {
|
||||
for right_val in right_vals.iter() {
|
||||
for left_val in &left_vals {
|
||||
for right_val in &right_vals {
|
||||
if left_val == right_val {
|
||||
return Ok(true);
|
||||
}
|
||||
@ -784,8 +784,8 @@ fn eval_expression(
|
||||
root,
|
||||
item,
|
||||
)?;
|
||||
for column_val in column_vals.iter() {
|
||||
for regex_val in regex_vals.iter() {
|
||||
for column_val in &column_vals {
|
||||
for regex_val in &regex_vals {
|
||||
let column_str = column_val.as_str().ok_or((
|
||||
StatusCode::BAD_REQUEST,
|
||||
Json(ndc_models::ErrorResponse {
|
||||
@ -921,7 +921,7 @@ fn eval_comparison_target(
|
||||
ndc_models::ComparisonTarget::Column { name, path } => {
|
||||
let rows = eval_path(collection_relationships, variables, state, path, item)?;
|
||||
let mut values = vec![];
|
||||
for row in rows.iter() {
|
||||
for row in &rows {
|
||||
let value = eval_column(row, name.as_str())?;
|
||||
values.push(value);
|
||||
}
|
||||
@ -1121,7 +1121,7 @@ fn eval_column_mapping(
|
||||
src_row: &Row,
|
||||
tgt_row: &Row,
|
||||
) -> Result<bool> {
|
||||
for (src_column, tgt_column) in relationship.column_mapping.iter() {
|
||||
for (src_column, tgt_column) in &relationship.column_mapping {
|
||||
let src_value = eval_column(src_row, src_column)?;
|
||||
let tgt_value = eval_column(tgt_row, tgt_column)?;
|
||||
if src_value != tgt_value {
|
||||
|
@ -191,7 +191,7 @@ pub(crate) fn test_introspection_expectation(
|
||||
|
||||
// Execute the test
|
||||
let mut responses = Vec::new();
|
||||
for session in sessions.iter() {
|
||||
for session in &sessions {
|
||||
let response = execute_query(
|
||||
&test_ctx.http_context,
|
||||
&schema,
|
||||
@ -303,7 +303,7 @@ pub fn test_execution_expectation(
|
||||
query,
|
||||
variables: None,
|
||||
};
|
||||
for session in sessions.iter() {
|
||||
for session in &sessions {
|
||||
let response = execute_query(
|
||||
&test_ctx.http_context,
|
||||
&schema,
|
||||
|
@ -61,7 +61,7 @@ fn build_filter_expression<'s>(
|
||||
// The "_and" field value should be a list
|
||||
let and_values = field.value.as_list()?;
|
||||
|
||||
for value in and_values.iter() {
|
||||
for value in and_values {
|
||||
// Each value in the list should be an object
|
||||
let value_object = value.as_object()?;
|
||||
and_expressions.push(resolve_filter_object(
|
||||
@ -84,7 +84,7 @@ fn build_filter_expression<'s>(
|
||||
// The "_or" field value should be a list
|
||||
let or_values = field.value.as_list()?;
|
||||
|
||||
for value in or_values.iter() {
|
||||
for value in or_values {
|
||||
let value_object = value.as_object()?;
|
||||
or_expressions.push(resolve_filter_object(
|
||||
value_object,
|
||||
|
@ -32,7 +32,7 @@ pub(crate) fn build_ndc_order_by<'s>(
|
||||
let mut ndc_order_elements = Vec::new();
|
||||
let mut relationships = BTreeMap::new();
|
||||
|
||||
for v in arguments.iter() {
|
||||
for v in arguments {
|
||||
match v {
|
||||
normalized_ast::Value::Object(arguments) => {
|
||||
// Check if the input object contains exactly one key-value pair.
|
||||
@ -140,7 +140,7 @@ pub(crate) fn build_ndc_order_by_element<'s>(
|
||||
// which has a relationship column called `Comments` which has a non-relationship column
|
||||
// called `text`, you'll have to provide the following paths to access the `text` column:
|
||||
// ["UserPosts", "PostsComments"]
|
||||
for path in relationship_paths.iter() {
|
||||
for path in &relationship_paths {
|
||||
order_by_element_path.push(ndc_models::PathElement {
|
||||
relationship: path.0.clone(),
|
||||
arguments: BTreeMap::new(),
|
||||
|
@ -37,7 +37,7 @@ pub fn generate_ir<'n, 's>(
|
||||
.clone()
|
||||
.ok_or_else(|| gql::normalized_ast::Error::NoTypenameFound)?;
|
||||
let mut ir = IndexMap::new();
|
||||
for (alias, field) in selection_set.fields.iter() {
|
||||
for (alias, field) in &selection_set.fields {
|
||||
let field_call = field.field_call()?;
|
||||
let field_ir = match field_call.name.as_str() {
|
||||
"__typename" => Ok(root_field::QueryRootField::TypeName {
|
||||
|
@ -354,7 +354,7 @@ pub(crate) fn build_remote_relationship<'n, 's>(
|
||||
source_field: source_field_path,
|
||||
target_field: target_field_path,
|
||||
target_ndc_column,
|
||||
} in annotation.mappings.iter()
|
||||
} in &annotation.mappings
|
||||
{
|
||||
let source_column = get_field_mapping_of_field_name(
|
||||
type_mappings,
|
||||
@ -434,7 +434,7 @@ pub(crate) fn build_remote_command_relationship<'n, 's>(
|
||||
for metadata_resolve::RelationshipCommandMapping {
|
||||
source_field: source_field_path,
|
||||
argument_name: target_argument_name,
|
||||
} in annotation.mappings.iter()
|
||||
} in &annotation.mappings
|
||||
{
|
||||
let source_column = get_field_mapping_of_field_name(
|
||||
type_mappings,
|
||||
|
@ -99,7 +99,7 @@ pub fn get_all_usage_counts_in_query(ir: &IndexMap<Alias, RootField<'_, '_>>) ->
|
||||
}
|
||||
|
||||
fn extend_usage_count(usage_counts: UsagesCounts, all_usage_counts: &mut UsagesCounts) {
|
||||
for model_count in usage_counts.models_used.into_iter() {
|
||||
for model_count in usage_counts.models_used {
|
||||
let countable_model = &model_count.model;
|
||||
match all_usage_counts
|
||||
.models_used
|
||||
@ -114,7 +114,7 @@ fn extend_usage_count(usage_counts: UsagesCounts, all_usage_counts: &mut UsagesC
|
||||
}
|
||||
}
|
||||
}
|
||||
for command_count in usage_counts.commands_used.into_iter() {
|
||||
for command_count in usage_counts.commands_used {
|
||||
let countable_model = &command_count.command;
|
||||
match all_usage_counts
|
||||
.commands_used
|
||||
|
@ -150,7 +150,7 @@ pub fn generate_request_plan<'n, 's, 'ir>(
|
||||
) -> Result<RequestPlan<'n, 's, 'ir>, error::Error> {
|
||||
let mut request_plan = None;
|
||||
|
||||
for (alias, field) in ir.into_iter() {
|
||||
for (alias, field) in ir {
|
||||
match field {
|
||||
root_field::RootField::QueryRootField(field_ir) => {
|
||||
let mut query_plan = match request_plan {
|
||||
@ -525,7 +525,7 @@ impl ExecuteQueryResult {
|
||||
pub fn to_graphql_response(self) -> gql::http::Response {
|
||||
let mut data = IndexMap::new();
|
||||
let mut errors = Vec::new();
|
||||
for (alias, field_result) in self.root_fields.into_iter() {
|
||||
for (alias, field_result) in self.root_fields {
|
||||
let result = match field_result.result {
|
||||
Ok(value) => value,
|
||||
Err(e) => {
|
||||
@ -622,7 +622,7 @@ async fn execute_query_field_plan<'n, 's, 'ir>(
|
||||
) => {
|
||||
let mut tasks: Vec<_> =
|
||||
Vec::with_capacity(entity_execution_plans.capacity());
|
||||
for query in entity_execution_plans.into_iter() {
|
||||
for query in entity_execution_plans {
|
||||
// We are not running the field plans parallely here, we are just running them concurrently on a single thread.
|
||||
// To run the field plans parallely, we will need to use tokio::spawn for each field plan.
|
||||
let task = async {
|
||||
@ -754,7 +754,7 @@ pub async fn execute_mutation_plan<'n, 's, 'ir>(
|
||||
}
|
||||
}
|
||||
|
||||
for executed_root_field in executed_root_fields.into_iter() {
|
||||
for executed_root_field in executed_root_fields {
|
||||
let (alias, root_field) = executed_root_field;
|
||||
root_fields.insert(alias, root_field);
|
||||
}
|
||||
@ -781,7 +781,7 @@ pub async fn execute_query_plan<'n, 's, 'ir>(
|
||||
})
|
||||
.await;
|
||||
|
||||
for executed_root_field in executed_root_fields.into_iter() {
|
||||
for executed_root_field in executed_root_fields {
|
||||
let (alias, root_field) = executed_root_field;
|
||||
root_fields.insert(alias, root_field);
|
||||
}
|
||||
|
@ -58,7 +58,7 @@ pub(crate) fn ndc_query_ir<'s, 'ir>(
|
||||
.collect();
|
||||
|
||||
// Add the variable arguments which are used for remote joins
|
||||
for (variable_name, variable_argument) in ir.variable_arguments.iter() {
|
||||
for (variable_name, variable_argument) in &ir.variable_arguments {
|
||||
arguments.insert(
|
||||
variable_name.to_string(),
|
||||
ndc_models::Argument::Variable {
|
||||
|
@ -55,14 +55,14 @@ pub(crate) fn collect_relationships(
|
||||
}
|
||||
|
||||
// from filter clause
|
||||
for (name, relationship) in ir.filter_clause.relationships.iter() {
|
||||
for (name, relationship) in &ir.filter_clause.relationships {
|
||||
let result = process_model_relationship_definition(relationship)?;
|
||||
relationships.insert(name.to_string(), result);
|
||||
}
|
||||
|
||||
// from order by clause
|
||||
if let Some(order_by) = &ir.order_by {
|
||||
for (name, relationship) in order_by.relationships.iter() {
|
||||
for (name, relationship) in &order_by.relationships {
|
||||
let result = process_model_relationship_definition(relationship)?;
|
||||
relationships.insert(name.to_string(), result);
|
||||
}
|
||||
@ -90,7 +90,7 @@ pub(crate) fn process_model_relationship_definition(
|
||||
source_field: source_field_path,
|
||||
target_field: _,
|
||||
target_ndc_column,
|
||||
} in mappings.iter()
|
||||
} in mappings
|
||||
{
|
||||
if !matches!(
|
||||
metadata_resolve::relationship_execution_category(
|
||||
@ -159,7 +159,7 @@ pub(crate) fn process_command_relationship_definition(
|
||||
for metadata_resolve::RelationshipCommandMapping {
|
||||
source_field: source_field_path,
|
||||
argument_name: target_argument,
|
||||
} in annotation.mappings.iter()
|
||||
} in &annotation.mappings
|
||||
{
|
||||
if !matches!(
|
||||
metadata_resolve::relationship_execution_category(
|
||||
|
@ -101,7 +101,7 @@ fn collect_argument_from_rows(
|
||||
let mut arguments = HashSet::new();
|
||||
for row_set in lhs_response {
|
||||
if let Some(ref rows) = row_set.rows {
|
||||
for row in rows.iter() {
|
||||
for row in rows {
|
||||
match lhs_response_type {
|
||||
ProcessResponseAs::Array { .. } | ProcessResponseAs::Object { .. } => {
|
||||
collect_argument_from_row(row, join_fields, path, &mut arguments)?;
|
||||
@ -111,7 +111,7 @@ fn collect_argument_from_rows(
|
||||
type_container,
|
||||
} => {
|
||||
let mut command_rows = resolve_command_response_row(row, type_container)?;
|
||||
for command_row in command_rows.iter_mut() {
|
||||
for command_row in &mut command_rows {
|
||||
collect_argument_from_row(
|
||||
command_row,
|
||||
join_fields,
|
||||
|
@ -180,7 +180,7 @@ fn visit_location_path_and_insert_value(
|
||||
description: "expected row; encountered null".into(),
|
||||
})?;
|
||||
|
||||
for inner_row in rows.iter_mut() {
|
||||
for inner_row in &mut rows {
|
||||
insert_value_into_row(
|
||||
path_tail,
|
||||
join_node,
|
||||
@ -199,7 +199,7 @@ fn visit_location_path_and_insert_value(
|
||||
Vec<IndexMap<String, ndc_models::RowFieldValue>>,
|
||||
>(row_field_val.0.clone())
|
||||
{
|
||||
for inner_row in rows.iter_mut() {
|
||||
for inner_row in &mut rows {
|
||||
insert_value_into_row(
|
||||
path_tail,
|
||||
join_node,
|
||||
|
@ -51,7 +51,7 @@ fn build_namespace_schema<'s, S: crate::schema::SchemaContext>(
|
||||
let nr = crate::validation::normalize_request(ns, schema, request)
|
||||
.map_err(|e| Error::NormalizeIntrospectionQuery(e.to_string()))?;
|
||||
let mut result = HashMap::new();
|
||||
for (_alias, field) in nr.selection_set.fields.iter() {
|
||||
for (_alias, field) in &nr.selection_set.fields {
|
||||
let field_call = field.field_call().map_err(|_| Error::FieldCallNotFound)?;
|
||||
match field_call.name.as_str() {
|
||||
"__schema" => {
|
||||
|
@ -99,7 +99,7 @@ impl Line {
|
||||
|
||||
fn append_to(&self, bytes: &[u8], s: &mut String) {
|
||||
let mut i = self.start;
|
||||
for &escape_char_i in self.escape_chars.iter() {
|
||||
for &escape_char_i in &self.escape_chars {
|
||||
s.push_str(unsafe { from_utf8_unchecked(&bytes[i..escape_char_i]) });
|
||||
i = escape_char_i + 1;
|
||||
}
|
||||
@ -186,7 +186,7 @@ fn parse_block_string(bytes: &[u8]) -> Result<(String, Consumed, usize), (Error,
|
||||
|
||||
// Trim the common indentation from the lines (excluding the first line)
|
||||
if let Some(common_indent) = common_indent {
|
||||
for line in lines[1..].iter_mut() {
|
||||
for line in &mut lines[1..] {
|
||||
line.trim_front(common_indent);
|
||||
}
|
||||
}
|
||||
|
@ -291,10 +291,10 @@ impl<'s, S: SchemaContext> SelectionSet<'s, S> {
|
||||
/// be retained and also the `title` field's `FieldCalls` key will now be an empty vector.
|
||||
pub fn filter_field_calls_by_typename(&self, type_name: ast::TypeName) -> SelectionSet<'s, S> {
|
||||
let mut filtered_selection_set_fields = IndexMap::new();
|
||||
for (alias, field) in self.fields.iter() {
|
||||
for (alias, field) in &self.fields {
|
||||
let mut field_calls = HashMap::new();
|
||||
let mut should_retain = false;
|
||||
for (type_name_path, field_call) in field.field_calls.iter() {
|
||||
for (type_name_path, field_call) in &field.field_calls {
|
||||
match type_name_path.as_slice() {
|
||||
[] => {
|
||||
field_calls.insert(vec![], field_call.clone());
|
||||
|
@ -91,7 +91,7 @@ where
|
||||
acc
|
||||
});
|
||||
let mut normalized_fields = IndexMap::new();
|
||||
for (alias, (alias_type, typed_fields)) in field_map.into_iter() {
|
||||
for (alias, (alias_type, typed_fields)) in field_map {
|
||||
let alias = ast::Alias(alias.clone());
|
||||
let (field_calls, selection_set) = merge_fields(
|
||||
namespace,
|
||||
@ -162,7 +162,7 @@ where
|
||||
|
||||
let mut alias_selection_sets = Vec::new();
|
||||
let mut field_calls = HashMap::new();
|
||||
for (reachability, fields) in typed_fields.into_iter() {
|
||||
for (reachability, fields) in typed_fields {
|
||||
let cannonical_field = fields.head;
|
||||
|
||||
let arguments = normalize_arguments(
|
||||
|
@ -83,11 +83,10 @@ pub fn get_argument_mappings<'a>(
|
||||
ArgumentMappingError,
|
||||
> {
|
||||
let mut unconsumed_argument_mappings: BTreeMap<&ArgumentName, &models::ConnectorArgumentName> =
|
||||
BTreeMap::from_iter(
|
||||
argument_mapping
|
||||
.iter()
|
||||
.map(|(k, v)| (k, models::ConnectorArgumentName::ref_cast(v))),
|
||||
);
|
||||
argument_mapping
|
||||
.iter()
|
||||
.map(|(k, v)| (k, models::ConnectorArgumentName::ref_cast(v)))
|
||||
.collect();
|
||||
|
||||
let mut resolved_argument_mappings =
|
||||
BTreeMap::<ArgumentName, models::ConnectorArgumentName>::new();
|
||||
|
@ -208,7 +208,12 @@ pub fn resolve_graphql_config(
|
||||
// TODO: Naveen: Currently we do not allow enabling a specific direction
|
||||
// for orderableField. In future when we support this, we would like to
|
||||
// build different enum types for different variations of directions.
|
||||
if HashSet::from_iter(order_by_enum_type.directions.iter().cloned())
|
||||
let input_directions = order_by_enum_type
|
||||
.directions
|
||||
.iter()
|
||||
.copied()
|
||||
.collect::<HashSet<_>>();
|
||||
if input_directions
|
||||
!= HashSet::from([OrderByDirection::Asc, OrderByDirection::Desc])
|
||||
{
|
||||
let invalid_directions = order_by_enum_type
|
||||
|
@ -474,7 +474,7 @@ fn resolve_model_graphql_api(
|
||||
})?;
|
||||
|
||||
let mut order_by_fields = BTreeMap::new();
|
||||
for (field_name, field_mapping) in field_mappings.iter() {
|
||||
for (field_name, field_mapping) in field_mappings {
|
||||
order_by_fields.insert(
|
||||
field_name.clone(),
|
||||
OrderByExpressionInfo {
|
||||
|
@ -156,7 +156,7 @@ pub(crate) fn resolve_object_boolean_expression_type(
|
||||
})?;
|
||||
|
||||
// validate comparable fields
|
||||
for comparable_field in object_boolean_expression.comparable_fields.iter() {
|
||||
for comparable_field in &object_boolean_expression.comparable_fields {
|
||||
if !object_type_representation
|
||||
.object_type
|
||||
.fields
|
||||
@ -307,7 +307,7 @@ pub fn resolve_boolean_expression_info(
|
||||
graphql_config_error: GraphqlConfigError::MissingFilterInputFieldInGraphqlConfig,
|
||||
})?;
|
||||
|
||||
for (field_name, field_mapping) in field_mappings.iter() {
|
||||
for (field_name, field_mapping) in field_mappings {
|
||||
// Generate comparison expression for fields mapped to simple scalar type
|
||||
if let Some((scalar_type_name, scalar_type_info)) =
|
||||
data_connector_scalar_types::get_simple_scalar(
|
||||
@ -317,9 +317,7 @@ pub fn resolve_boolean_expression_info(
|
||||
{
|
||||
if let Some(graphql_type_name) = &scalar_type_info.comparison_expression_name.clone() {
|
||||
let mut operators = BTreeMap::new();
|
||||
for (op_name, op_definition) in
|
||||
scalar_type_info.scalar_type.comparison_operators.iter()
|
||||
{
|
||||
for (op_name, op_definition) in &scalar_type_info.scalar_type.comparison_operators {
|
||||
operators.insert(
|
||||
OperatorName(op_name.clone()),
|
||||
resolve_ndc_type(
|
||||
|
@ -91,7 +91,7 @@ pub fn resolve_output_type_permission(
|
||||
// exist in this type definition
|
||||
for type_permission in &type_permissions.permissions {
|
||||
if let Some(output) = &type_permission.output {
|
||||
for field_name in output.allowed_fields.iter() {
|
||||
for field_name in &output.allowed_fields {
|
||||
if !object_type_representation.fields.contains_key(field_name) {
|
||||
return Err(Error::UnknownFieldInOutputPermissionsDefinition {
|
||||
field_name: field_name.clone(),
|
||||
@ -124,7 +124,7 @@ pub(crate) fn resolve_input_type_permission(
|
||||
for FieldPreset {
|
||||
field: field_name,
|
||||
value,
|
||||
} in input.field_presets.iter()
|
||||
} in &input.field_presets
|
||||
{
|
||||
// check if the field exists on this type
|
||||
match object_type_representation.fields.get(field_name) {
|
||||
|
@ -158,7 +158,7 @@ impl MetadataAccessor {
|
||||
Metadata::Versioned(MetadataWithVersion::V2(metadata)) => {
|
||||
let mut accessor: MetadataAccessor =
|
||||
MetadataAccessor::new_empty(Some(metadata.flags));
|
||||
for supergraph_object in metadata.supergraph.objects.into_iter() {
|
||||
for supergraph_object in metadata.supergraph.objects {
|
||||
load_metadata_supergraph_object(supergraph_object, &mut accessor);
|
||||
}
|
||||
for subgraph in metadata.subgraphs {
|
||||
|
@ -132,6 +132,7 @@ pub struct OrderByDirectionValues {
|
||||
Serialize,
|
||||
Deserialize,
|
||||
Clone,
|
||||
Copy,
|
||||
Debug,
|
||||
PartialEq,
|
||||
JsonSchema,
|
||||
|
@ -148,7 +148,7 @@ pub fn gen_root_schema_for<T: OpenDd>(
|
||||
|
||||
let mut root_schema = gen.root_schema_for::<Wrapper<T>>();
|
||||
// Generate `$id` metadata field for subschemas
|
||||
for (schema_name, schema) in root_schema.definitions.iter_mut() {
|
||||
for (schema_name, schema) in &mut root_schema.definitions {
|
||||
if let schemars::schema::Schema::Object(ref mut object) = schema {
|
||||
// Don't set $id for references, the $id should be set on the referenced schema
|
||||
if !object.is_ref() {
|
||||
|
@ -139,7 +139,7 @@ pub fn build_boolean_expression_input_schema(
|
||||
|
||||
// relationship fields
|
||||
// TODO(naveen): Add support for command relationships
|
||||
for (rel_name, relationship) in object_type_representation.relationships.iter() {
|
||||
for (rel_name, relationship) in &object_type_representation.relationships {
|
||||
if let metadata_resolve::RelationshipTarget::Model {
|
||||
model_name,
|
||||
relationship_type,
|
||||
|
@ -155,7 +155,7 @@ pub fn build_model_order_by_input_schema(
|
||||
|
||||
// relationship fields
|
||||
// TODO(naveen): Add support for command relationships.
|
||||
for (rel_name, relationship) in object_type_representation.relationships.iter() {
|
||||
for (rel_name, relationship) in &object_type_representation.relationships {
|
||||
if let metadata_resolve::RelationshipTarget::Model {
|
||||
model_name,
|
||||
relationship_type,
|
||||
|
@ -24,7 +24,7 @@ pub fn query_root_schema(
|
||||
) -> Result<gql_schema::Object<GDS>, crate::Error> {
|
||||
let mut fields = BTreeMap::new();
|
||||
for model in gds.metadata.models.values() {
|
||||
for select_unique in model.model.graphql_api.select_uniques.iter() {
|
||||
for select_unique in &model.model.graphql_api.select_uniques {
|
||||
let (field_name, field) = select_one::select_one_field(
|
||||
gds,
|
||||
builder,
|
||||
@ -34,7 +34,7 @@ pub fn query_root_schema(
|
||||
)?;
|
||||
fields.insert(field_name, field);
|
||||
}
|
||||
for select_many in model.model.graphql_api.select_many.iter() {
|
||||
for select_many in &model.model.graphql_api.select_many {
|
||||
let (field_name, field) = select_many::select_many_field(
|
||||
gds,
|
||||
builder,
|
||||
|
@ -49,7 +49,7 @@ pub(crate) fn apollo_federation_field(
|
||||
let entities_field_permissions =
|
||||
get_entities_field_namespace_permissions(object_type_representation, model);
|
||||
|
||||
for (role, model_predicate) in entities_field_permissions.iter() {
|
||||
for (role, model_predicate) in &entities_field_permissions {
|
||||
let role_type_permissions = roles_type_permissions.entry(role.clone()).or_default();
|
||||
role_type_permissions
|
||||
.insert(model.model.data_type.clone(), model_predicate.clone());
|
||||
|
@ -65,7 +65,7 @@ pub(crate) fn relay_node_field(
|
||||
let node_field_permissions =
|
||||
get_node_field_namespace_permissions(object_type_representation, model);
|
||||
|
||||
for (role, model_predicate) in node_field_permissions.iter() {
|
||||
for (role, model_predicate) in &node_field_permissions {
|
||||
let role_type_permissions = roles_type_permissions.entry(role.clone()).or_default();
|
||||
role_type_permissions
|
||||
.insert(model.model.data_type.clone(), model_predicate.clone());
|
||||
|
@ -34,7 +34,7 @@ pub(crate) fn select_one_field(
|
||||
let query_root_field = select_unique.query_root_field.clone();
|
||||
|
||||
let mut arguments = BTreeMap::new();
|
||||
for (field_name, field) in select_unique.unique_identifier.iter() {
|
||||
for (field_name, field) in &select_unique.unique_identifier {
|
||||
let graphql_field_name = mk_name(field_name.0.as_str())?;
|
||||
let argument = gql_schema::InputField::new(
|
||||
graphql_field_name,
|
||||
|
@ -26,7 +26,7 @@ impl<K: Serialize + for<'a> Deserialize<'a> + Eq + Hash, V: Serialize> Serialize
|
||||
S: serde::Serializer,
|
||||
{
|
||||
let mut map = serializer.serialize_map(Some(self.0.len()))?;
|
||||
for (k, v) in self.0.iter() {
|
||||
for (k, v) in &self.0 {
|
||||
let stringified_key = serde_json::to_string(k).map_err(serde::ser::Error::custom)?;
|
||||
map.serialize_entry(&stringified_key, v)?;
|
||||
}
|
||||
@ -43,7 +43,7 @@ impl<'de, K: DeserializeOwned + Hash + Eq + Serialize, V: Deserialize<'de>> Dese
|
||||
{
|
||||
let map: HashMap<String, V> = Deserialize::deserialize(deserializer)?;
|
||||
let mut result = HashMap::new();
|
||||
for (k, v) in map.into_iter() {
|
||||
for (k, v) in map {
|
||||
let k_str = serde_json::from_str(&k).map_err(serde::de::Error::custom)?;
|
||||
result.insert(k_str, v);
|
||||
}
|
||||
|
@ -70,7 +70,7 @@ fn get_doc(attrs: &[syn::Attribute]) -> Option<String> {
|
||||
// Added for backward-compatibility, but perhaps we shouldn't do this
|
||||
// https://github.com/rust-lang/rust/issues/32088
|
||||
if lines.iter().all(|l| l.starts_with('*')) {
|
||||
for line in lines.iter_mut() {
|
||||
for line in &mut lines {
|
||||
*line = line[1..].trim()
|
||||
}
|
||||
while let Some(&"") = lines.first() {
|
||||
|
@ -78,7 +78,7 @@ fn generate_named_fields_value<'a>(
|
||||
fields: &[NamedField<'a>],
|
||||
) -> proc_macro2::TokenStream {
|
||||
let mut field_deserializations = Vec::new();
|
||||
for field in fields.iter() {
|
||||
for field in fields {
|
||||
let field_name = field.field_name;
|
||||
let field_name_str = field.renamed_field.as_str();
|
||||
|
||||
@ -132,7 +132,7 @@ fn generate_named_fields_value<'a>(
|
||||
|
||||
fn impl_json_schema_named_fields(fields: &[NamedField<'_>]) -> proc_macro2::TokenStream {
|
||||
let mut fields_gen = Vec::new();
|
||||
for field in fields.iter() {
|
||||
for field in fields {
|
||||
let field_name = field.renamed_field.as_str();
|
||||
let ty = field.field_type.clone();
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user