diff --git a/examples/c/src/parser.rs b/examples/c/src/parser.rs
index 448dff4..04e905c 100644
--- a/examples/c/src/parser.rs
+++ b/examples/c/src/parser.rs
@@ -481,7 +481,7 @@ pub fn tokenize(
 
 include!(concat!(env!("OUT_DIR"), "/generated.rs"));
 
-impl<'a> Parser<'a> {
+impl Parser<'_> {
     fn check_missing_type_specifier(
         &self,
         decl_specs: Option,
@@ -567,7 +567,7 @@ impl<'a> Parser<'a> {
 }
 
 #[allow(clippy::ptr_arg)]
-impl<'a> PredicatesAndActions for Parser<'a> {
+impl PredicatesAndActions for Parser<'_> {
     fn build(&mut self, rule: Rule, node: NodeRef, diags: &mut Vec) {
         match rule {
             Rule::Declaration => {
diff --git a/examples/l/src/parser.rs b/examples/l/src/parser.rs
index d8532a4..610a6d4 100644
--- a/examples/l/src/parser.rs
+++ b/examples/l/src/parser.rs
@@ -107,7 +107,7 @@ pub fn tokenize(
     (tokens, ranges)
 }
 
-impl<'a> PredicatesAndActions for Parser<'a> {
+impl PredicatesAndActions for Parser<'_> {
     fn predicate_param_list_1(&self) -> bool {
         self.peek(1) != Token::RPar
     }
diff --git a/examples/lua/src/parser.rs b/examples/lua/src/parser.rs
index 184575f..66d96e9 100644
--- a/examples/lua/src/parser.rs
+++ b/examples/lua/src/parser.rs
@@ -93,7 +93,9 @@ fn parse_comments(lexer: &mut Lexer<'_, Token>) -> Result<(), LexerError> {
 }
 fn parse_first_line_comment(lexer: &mut Lexer<'_, Token>) -> Token {
     if lexer.span().start == 0 {
-        lexer.remainder().find('\n').map(|i| lexer.bump(i + 1));
+        if let Some(i) = lexer.remainder().find('\n') {
+            lexer.bump(i + 1)
+        }
         return Token::FirstLineComment;
     }
     Token::Hash
@@ -268,7 +270,7 @@ pub fn tokenize(
 
 include!(concat!(env!("OUT_DIR"), "/generated.rs"));
 
-impl<'a> PredicatesAndActions for Parser<'a> {
+impl PredicatesAndActions for Parser<'_> {
     fn build(&mut self, rule: Rule, node: NodeRef, diags: &mut Vec) {
         match rule {
             Rule::Expstat => {