Remove redundant clones. (#752)

I noticed a few extra calls to `.clone()` while working on an unrelated
refactor. I want to remove them for brevity and simplicity; I don't
expect a performance improvement.

This turns on the Clippy warning `redundant_clone`, which detects
unnecessary calls to `.clone()` (and `.to_string()`).
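
For illustration, a minimal hypothetical example (not from this repository)
of roughly the pattern the lint flags: a value cloned even though the
original is never used again.

```rust
fn greet(name: String) -> String {
    format!("Hello, {name}")
}

fn main() {
    let name = String::from("world");
    // `name` is never used after this call, so the `.clone()` is redundant
    // and `name` could be passed directly.
    let message = greet(name.clone());
    println!("{message}");
}
```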

It is an unstable warning and so might report some false positives. If
we find any, we can suppress the warning there.
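
If a false positive does turn up, the lint can be silenced at the smallest
scope that needs it rather than disabled workspace-wide. A minimal sketch of
the attribute placement (hypothetical function, not part of this change):

```rust
// Hypothetical function, shown only to illustrate where the attribute goes:
// `#[allow(clippy::redundant_clone)]` scopes the suppression to this one item.
#[allow(clippy::redundant_clone)]
fn shout(name: String) -> String {
    name.clone().to_uppercase()
}
```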

V3_GIT_ORIGIN_REV_ID: a713f29cf862d6f4cb40300105c6b9f96df00676
Samir Talwar 2024-06-24 16:08:11 +02:00 committed by hasura-bot
parent 1199b669cb
commit 1378730d43
13 changed files with 27 additions and 26 deletions

@ -25,6 +25,8 @@ pedantic = { level = "warn", priority = -1 }
# ban printing to stdout and stderr
print_stdout = "warn"
print_stderr = "warn"
+# unstable warnings; we might need to suppress them
+redundant_clone = "warn"
# disable certain pedantic warnings
doc_markdown = "allow"
implicit_hasher = "allow"

@ -61,7 +61,7 @@ pub(crate) fn execute(
let actor_name_uppercase = actor_name_str.to_uppercase();
let actor_name_uppercase_value = serde_json::Value::String(actor_name_uppercase);
let mut new_row = (*actor_obj).clone();
new_row.insert("name".into(), actor_name_uppercase_value.clone());
new_row.insert("name".into(), actor_name_uppercase_value);
state.actors.insert(id_int, new_row);
let old_row = state.actors.get(&id_int);
Ok(old_row.map_or(Ok(serde_json::Value::Null), |old_row| {

@ -26,8 +26,8 @@ pub fn merge_with_common_metadata(
let common_metadata = fs::read_to_string(common_metadata_path).unwrap();
let test_metadata = fs::read_to_string(metadata_path_string).unwrap();
let mut first_json_value: Value = serde_json::from_str(&common_metadata.to_string()).unwrap();
let second_json_value: Value = serde_json::from_str(&test_metadata.to_string()).unwrap();
let mut first_json_value: Value = serde_json::from_str(&common_metadata).unwrap();
let second_json_value: Value = serde_json::from_str(&test_metadata).unwrap();
first_json_value.merge(&second_json_value);
first_json_value
}

@ -430,7 +430,7 @@ fn assign_join_ids<'s, 'ir>(
}
};
let new_location = Location {
-join_node: new_node.clone(),
+join_node: new_node,
rest: assign_join_ids(&location.rest, state),
};
(key.to_string(), new_location)
@ -574,7 +574,7 @@ async fn execute_query_field_plan<'n, 's, 'ir>(
);
RootFieldResult::new(
true, // __type(name: String!): __Type ; the type field is nullable
resolve_type_field(selection_set, schema, &type_name, &GDSRoleNamespaceGetter{scope:namespace.clone()}),
resolve_type_field(selection_set, schema, &type_name, &GDSRoleNamespaceGetter{scope:namespace}),
)
}
NodeQueryPlan::SchemaField {
@ -589,7 +589,7 @@ async fn execute_query_field_plan<'n, 's, 'ir>(
);
RootFieldResult::new(
false, // __schema: __Schema! ; the schema field is not nullable
-resolve_schema_field(selection_set, schema, &GDSRoleNamespaceGetter{scope:namespace.clone()}),
+resolve_schema_field(selection_set, schema, &GDSRoleNamespaceGetter{scope:namespace}),
)
}
NodeQueryPlan::NDCQueryExecution(ndc_query) => RootFieldResult::new(

@ -477,7 +477,7 @@ mod tests {
);
assert_eq!(
Lexer::new("\n\n\r\rfoo").read_next_token(),
-Some(Ok(spanned_token(5, 1, 5, 3, Token::from(foo_name.clone()))))
+Some(Ok(spanned_token(5, 1, 5, 3, Token::from(foo_name))))
);
}
@ -536,7 +536,7 @@ mod tests {
);
assert_eq!(
Lexer::new(",,,foo,,,").read_next_token(),
-Some(Ok(spanned_token(1, 4, 1, 6, Token::from(foo_name.clone()))))
+Some(Ok(spanned_token(1, 4, 1, 6, Token::from(foo_name))))
);
}

@ -380,7 +380,7 @@ mod test {
// Test if foo: [Int] = [1,2,3] is coerced to [1,2,3]
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};
@ -410,7 +410,7 @@ mod test {
// Test if foo: [Int] = 1 is coerced to [1]
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};
@ -438,7 +438,7 @@ mod test {
// Test if foo: [Int] = null is coerced to null
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};
@ -464,7 +464,7 @@ mod test {
// Test if foo: [Int] = [1, "b", true] is not coerced
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};
@ -491,7 +491,7 @@ mod test {
// Test if foo: [[Int]] = 1 is coerced to [[1]]
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};
@ -521,7 +521,7 @@ mod test {
// Test if foo: [[Int]] = [1,2,3] is not coerced
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};
@ -547,7 +547,7 @@ mod test {
// Test if foo: [[Int]] = null is coerced to null
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};
@ -575,7 +575,7 @@ mod test {
// Test if foo: [[Int]] = [[1], [2, 3]] is coerced to [[1], [2, 3]]
let int_typename = ast::TypeName(mk_name!("Int"));
let int_scalar = Scalar {
-name: int_typename.clone(),
+name: int_typename,
description: None,
directives: vec![],
};

@ -296,7 +296,7 @@ fn resolve_scalar_operand(
return Err(
AggregateExpressionError::AggregateOperandScalarTypeNotFound {
name: aggregate_expression_name.clone(),
-type_name: qualified_custom_scalar_name.clone(),
+type_name: qualified_custom_scalar_name,
}
.into(),
);

@ -390,10 +390,10 @@ fn resolve_value_expression(
) -> Result<ValueExpression, Error> {
match value_expression_input {
open_dds::permissions::ValueExpression::SessionVariable(session_variable) => {
Ok::<ValueExpression, Error>(ValueExpression::SessionVariable(session_variable.clone()))
Ok::<ValueExpression, Error>(ValueExpression::SessionVariable(session_variable))
}
open_dds::permissions::ValueExpression::Literal(json_value) => {
-Ok(ValueExpression::Literal(json_value.clone()))
+Ok(ValueExpression::Literal(json_value))
}
open_dds::permissions::ValueExpression::BooleanExpression(_) => {
Err(Error::BooleanExpressionInValueExpressionForHeaderPresetsNotSupported)

@ -48,7 +48,7 @@ pub fn resolve_aggregate_expression(
ModelAggregateExpressionError::ModelAggregateExpressionOperandTypeMismatch {
model_name: model_name.clone(),
aggregate_expression: aggregate_expression_name.clone(),
-model_type: model_object_type.clone(),
+model_type: model_object_type,
aggregate_operand_type: aggregate_expression.operand.aggregated_type.clone(),
},
);

@ -225,7 +225,7 @@ pub(crate) fn resolve_object_boolean_expression_type(
resolve_boolean_expression_graphql_config(
&data_connector_name,
-graphql_type_name.clone(),
+graphql_type_name,
subgraph,
scalars,
type_mapping,

@ -176,7 +176,7 @@ fn add_aggregatable_fields(
{
return Err(Error::InternalDuplicateAggregatableField {
aggregate_expression: aggregate_expression.name.clone(),
-field_name: field_graphql_name.clone(),
+field_name: field_graphql_name,
});
}
}
@ -297,7 +297,7 @@ fn add_aggregation_functions(
{
return Err(Error::AggregationFunctionFieldNameConflict {
aggregate_expression: aggregate_expression.name.clone(),
-field_name: field_graphql_name.clone(),
+field_name: field_graphql_name,
});
}
}

@ -423,7 +423,7 @@ fn build_model_relationship_schema(
let annotation = FilterRelationshipAnnotation {
source_type: relationship.source.clone(),
relationship_name: relationship.relationship_name.clone(),
-target_source: target_model_source.clone(),
+target_source: target_model_source,
target_type: target_model.model.data_type.clone(),
target_model_name: target_model.model.name.clone(),
relationship_type: relationship_type.clone(),

@ -114,8 +114,7 @@ fn get_custom_input_type(
.map(|graphql_config| graphql_config.type_name.clone())
.ok_or_else(|| Error::NoGraphQlInputTypeNameForObject {
type_name: gds_type_name.clone(),
-})?
-.clone(),
+})?,
}),
}
.map(|type_id| builder.register_type(type_id))