Optimize some clone() calls.

This commit is contained in:
jcamiel 2023-11-27 16:30:00 +01:00
parent ebff4dc602
commit 9fc9bee091
No known key found for this signature in database
GPG Key ID: 07FF11CFD55356CC
10 changed files with 27 additions and 51 deletions

View File

@ -134,7 +134,7 @@ fn encode_bytes(b: Vec<u8>) -> String {
impl Method {
pub fn curl_args(&self, data: bool) -> Vec<String> {
match self.0.clone().as_str() {
match self.0.as_str() {
"GET" => {
if data {
vec!["--request".to_string(), "GET".to_string()]

View File

@ -70,7 +70,7 @@ pub fn number(reader: &mut Reader) -> ParseResult<Number> {
let decimal = if reader.try_literal(".") {
if reader.is_eof() {
return Err(Error {
pos: reader.clone().state.pos,
pos: reader.state.pos,
recoverable: false,
inner: ParseError::Expecting {
value: String::from("natural"),
@ -81,7 +81,7 @@ pub fn number(reader: &mut Reader) -> ParseResult<Number> {
let s = reader.read_while(|c| c.is_ascii_digit());
if s.is_empty() {
return Err(Error {
pos: reader.clone().state.pos,
pos: reader.state.pos,
recoverable: false,
inner: ParseError::Expecting {
value: String::from("natural"),
@ -188,7 +188,7 @@ pub fn literal(s: &str, reader: &mut Reader) -> ParseResult<()> {
// non recoverable reader
// => use combinator recover to make it recoverable
let start = reader.state;
if reader.clone().is_eof() {
if reader.is_eof() {
return Err(Error {
pos: start.pos,
recoverable: false,

View File

@ -53,7 +53,7 @@ pub fn render_expression(expr: &Expr, variables: &HashMap<String, Value>) -> Res
let name = &expr.variable.name;
let value = eval_expression(expr, variables)?;
if value.is_renderable() {
Ok(value.clone().to_string())
Ok(value.to_string())
} else {
let inner = RunnerError::UnrenderableVariable {
name: name.to_string(),

View File

@ -35,7 +35,7 @@ impl Error for parser::Error {
}
fn description(&self) -> String {
match self.clone().inner {
match self.inner {
ParseError::DuplicateSection => "Parsing section".to_string(),
ParseError::EscapeChar => "Parsing escape character".to_string(),
ParseError::Expecting { .. } => "Parsing literal".to_string(),

View File

@ -164,10 +164,10 @@ fn escape_char(reader: &mut Reader) -> ParseResult<char> {
Some('r') => Ok('\r'),
Some('t') => Ok('\t'),
Some('u') => unicode(reader),
_ => Err(error::Error {
_ => Err(Error {
pos: start.pos,
recoverable: false,
inner: error::ParseError::EscapeChar,
inner: ParseError::EscapeChar,
}),
}
}
@ -176,10 +176,10 @@ fn unicode(reader: &mut Reader) -> ParseResult<char> {
let v = hex_value(reader)?;
let c = match std::char::from_u32(v) {
None => {
return Err(error::Error {
pos: reader.clone().state.pos,
return Err(Error {
pos: reader.state.pos,
recoverable: false,
inner: error::ParseError::Unicode,
inner: ParseError::Unicode,
})
}
Some(c) => c,

View File

@ -351,7 +351,7 @@ pub fn regex(reader: &mut Reader) -> ParseResult<Regex> {
//
// To fit nicely in Hurl Error reporting, you need an error message string that does not spread on multiple lines
// You will assume that the error most relevant description is on the last line
let lines = s.split('\n').clone().collect::<Vec<&str>>();
let lines = s.split('\n').collect::<Vec<&str>>();
let last_line = lines.last().expect("at least one line");
last_line
.strip_prefix("error: ")
@ -479,7 +479,7 @@ pub fn float(reader: &mut Reader) -> ParseResult<Float> {
if reader.is_eof() {
return Err(Error {
pos: reader.clone().state.pos,
pos: reader.state.pos,
recoverable: false,
inner: ParseError::Expecting {
value: String::from("natural"),
@ -490,7 +490,7 @@ pub fn float(reader: &mut Reader) -> ParseResult<Float> {
let s = reader.read_while(|c| c.is_ascii_digit());
if s.is_empty() {
return Err(Error {
pos: reader.clone().state.pos,
pos: reader.state.pos,
recoverable: false,
inner: ParseError::Expecting {
value: String::from("natural"),

View File

@ -318,30 +318,6 @@ fn assert(reader: &mut Reader) -> ParseResult<Assert> {
let space1 = one_or_more_spaces(reader)?;
let predicate0 = predicate(reader)?;
// Specifics for jsonpath //
// jsonpath always return a list
// the equals predicate will be used as "firstEquals"
// you also need the firstStartsWith => not really orthogonal!!
/* let predicate0 = Predicate {
not: predicate0.clone().not,
space0: predicate0.clone().space0,
predicate_func: PredicateFunc {
source_info: predicate0.clone().predicate_func.source_info,
value: if query0.clone().is_jsonpath() {
match predicate0.clone().predicate_func.value {
PredicateFuncValue::EqualBool { space0, value } => PredicateFuncValue::FirstEqualBool { space0, value },
PredicateFuncValue::EqualInt { space0, value } => PredicateFuncValue::FirstEqualInt { space0, value },
PredicateFuncValue::EqualString { space0, value } => PredicateFuncValue::FirstEqualString { space0, value },
PredicateFuncValue::CountEqual { space0, value } => PredicateFuncValue::FirstCountEqual { space0, value },
PredicateFuncValue::StartWith { space0, value } => PredicateFuncValue::FirstStartWith { space0, value },
_ => predicate0.clone().predicate_func.value
}
} else {
predicate0.clone().predicate_func.value
},
},
};
*/
let line_terminator0 = line_terminator(reader)?;
Ok(Assert {
line_terminators,

View File

@ -232,7 +232,7 @@ pub(crate) fn unicode(reader: &mut Reader) -> ParseResult<char> {
let c = match std::char::from_u32(v) {
None => {
return Err(Error {
pos: reader.clone().state.pos,
pos: reader.state.pos,
recoverable: false,
inner: ParseError::Unicode,
});

View File

@ -66,7 +66,7 @@ impl Tokenizable for Entry {
fn tokenize(&self) -> Vec<Token> {
let mut tokens: Vec<Token> = vec![];
tokens.append(&mut self.request.tokenize());
if let Some(response) = self.clone().response {
if let Some(response) = &self.response {
tokens.append(&mut response.tokenize());
}
tokens
@ -90,7 +90,7 @@ impl Tokenizable for Request {
tokens.append(&mut self.line_terminator0.tokenize());
tokens.append(&mut self.headers.iter().flat_map(|e| e.tokenize()).collect());
tokens.append(&mut self.sections.iter().flat_map(|e| e.tokenize()).collect());
if let Some(body) = self.clone().body {
if let Some(body) = &self.body {
tokens.append(&mut body.tokenize());
}
tokens
@ -360,7 +360,7 @@ impl Tokenizable for Capture {
tokens.append(&mut self.space2.tokenize());
tokens.append(&mut self.query.tokenize());
for (space, filter) in &self.filters {
tokens.append(&mut space.clone().tokenize());
tokens.append(&mut space.tokenize());
tokens.append(&mut filter.tokenize());
}
tokens.append(&mut self.line_terminator0.tokenize());
@ -381,7 +381,7 @@ impl Tokenizable for Assert {
tokens.append(&mut self.space0.tokenize());
tokens.append(&mut self.query.tokenize());
for (space, filter) in &self.filters {
tokens.append(&mut space.clone().tokenize());
tokens.append(&mut space.tokenize());
tokens.append(&mut filter.tokenize());
}
tokens.append(&mut self.space1.tokenize());
@ -395,7 +395,7 @@ impl Tokenizable for Assert {
/// Token generation for a `Query`: a query renders exactly as its inner value.
impl Tokenizable for Query {
    fn tokenize(&self) -> Vec<Token> {
        // `tokenize` only needs `&self`, so delegate through the borrow;
        // cloning the whole value first (the pre-commit form) was a wasted allocation.
        self.value.tokenize()
    }
}
@ -682,14 +682,14 @@ impl Tokenizable for EncodedString {
let mut tokens: Vec<Token> = vec![];
if self.quotes {
tokens.push(Token::StringDelimiter(
if self.clone().quotes { "\"" } else { "" }.to_string(),
if self.quotes { "\"" } else { "" }.to_string(),
));
}
tokens.push(Token::String(self.encoded.clone()));
if self.quotes {
tokens.push(Token::StringDelimiter(
if self.clone().quotes { "\"" } else { "" }.to_string(),
if self.quotes { "\"" } else { "" }.to_string(),
));
}
tokens
@ -749,7 +749,7 @@ impl Tokenizable for LineTerminator {
fn tokenize(&self) -> Vec<Token> {
let mut tokens: Vec<Token> = vec![];
tokens.append(&mut self.space0.tokenize());
if let Some(comment) = self.clone().comment {
if let Some(comment) = &self.comment {
tokens.append(&mut comment.tokenize());
}
tokens.append(&mut self.newline.tokenize());
@ -761,7 +761,7 @@ impl Tokenizable for Whitespace {
/// Converts this whitespace run into tokens.
///
/// Returns an empty token list when the whitespace value is empty, so
/// callers never emit zero-width `Whitespace` tokens.
fn tokenize(&self) -> Vec<Token> {
    let mut tokens: Vec<Token> = vec![];
    if !self.value.is_empty() {
        // Clone only the `String` field the token needs, not the whole
        // `Whitespace` struct (which would also copy its source info).
        tokens.push(Token::Whitespace(self.value.clone()));
    }
    tokens
}
@ -769,13 +769,13 @@ impl Tokenizable for Whitespace {
/// Token generation for a `Comment`: re-emits the comment with its leading `#`.
impl Tokenizable for Comment {
    fn tokenize(&self) -> Vec<Token> {
        // `format!` takes its arguments by reference (via `Display`), so no
        // clone of the comment text is needed at all.
        vec![Token::Comment(format!("#{}", self.value))]
    }
}
/// Token generation for a `Filename`: spaces are backslash-escaped so the
/// emitted string round-trips through the parser.
impl Tokenizable for Filename {
    fn tokenize(&self) -> Vec<Token> {
        // `str::replace` borrows `self.value` and returns a fresh `String`,
        // so no preliminary clone of the field is required.
        let s = self.value.replace(' ', "\\ ");
        vec![Token::String(s)]
    }
}

View File

@ -587,7 +587,7 @@ fn lint_line_terminator(line_terminator: &LineTerminator) -> LineTerminator {
let space0 = match line_terminator.comment {
None => empty_whitespace(),
Some(_) => Whitespace {
value: line_terminator.clone().space0.value,
value: line_terminator.space0.value.clone(),
source_info: SourceInfo::new(Pos::new(0, 0), Pos::new(0, 0)),
},
};