diff --git a/parser/src/tokenizer/lexer.rs b/parser/src/tokenizer/lexer.rs
index fe049bc093..2b1adb4cfe 100644
--- a/parser/src/tokenizer/lexer.rs
+++ b/parser/src/tokenizer/lexer.rs
@@ -25,7 +25,7 @@ use std::fmt;
 /// Returns a reference to bytes from the given input if the given string is equal to the bytes,
 /// otherwise returns [`None`].
 ///
-fn eat<'a>(input: &'a [u8], wanted: &str) -> Option {
+fn eat(input: &[u8], wanted: &str) -> Option {
     let wanted = wanted.as_bytes();
     if input.len() < wanted.len() {
         return None;
diff --git a/parser/src/tokenizer/mod.rs b/parser/src/tokenizer/mod.rs
index 2cf32ccfc2..9452b483e2 100644
--- a/parser/src/tokenizer/mod.rs
+++ b/parser/src/tokenizer/mod.rs
@@ -91,7 +91,7 @@ pub(crate) fn tokenize(path: &str, input: StrTendril) -> Result