From 711750491891878650315ff30e83311a79ba741d Mon Sep 17 00:00:00 2001 From: Antal Spector-Zabusky Date: Thu, 9 Jan 2025 12:32:26 -0500 Subject: [PATCH] chore: respond to review from @jselig-rigetti --- quil-rs/src/parser/lexer/mod.rs | 16 +++++++++++++--- quil-rs/src/parser/token.rs | 7 ++++--- 2 files changed, 17 insertions(+), 6 deletions(-) diff --git a/quil-rs/src/parser/lexer/mod.rs b/quil-rs/src/parser/lexer/mod.rs index 4102ec82..5a0083bc 100644 --- a/quil-rs/src/parser/lexer/mod.rs +++ b/quil-rs/src/parser/lexer/mod.rs @@ -486,9 +486,9 @@ mod tests { case("a", vec![Token::Identifier("a".to_string())]), case("_a-2_b-2_", vec![Token::Identifier("_a-2_b-2_".to_string())]), case("a-2-%var", vec![ - Token::Identifier("a-2".to_string()), - Token::Operator(Operator::Minus), - Token::Variable("var".to_string()) + Token::Identifier("a-2".to_string()), + Token::Operator(Operator::Minus), + Token::Variable("var".to_string()) ]), case("BIT", vec![Token::DataType(DataType::Bit)]), case("BITS", vec![Token::Identifier("BITS".to_string())]), @@ -496,6 +496,16 @@ mod tests { case("nan", vec![Token::Identifier("nan".to_string())]), case("NaNa", vec![Token::Identifier("NaNa".to_string())]), case("nana", vec![Token::Identifier("nana".to_string())]), + case("-NaN", vec![Token::Operator(Operator::Minus), Token::Identifier("NaN".to_string())]), + case("-inf", vec![Token::Operator(Operator::Minus), Token::Identifier("inf".to_string())]), + case("-Infinity", vec![ + Token::Operator(Operator::Minus), + Token::Identifier("Infinity".to_string()) + ]), + case("-inferior", vec![ + Token::Operator(Operator::Minus), + Token::Identifier("inferior".to_string()) + ]), )] fn it_lexes_identifier(input: &str, expected: Vec<Token>) { let input = LocatedSpan::new(input); diff --git a/quil-rs/src/parser/token.rs b/quil-rs/src/parser/token.rs index 1b3dffca..894ead83 100644 --- a/quil-rs/src/parser/token.rs +++ b/quil-rs/src/parser/token.rs @@ -67,8 +67,8 @@ where } } -/// The subset of [`Token`]s which 
(a) do not contain more specific data and (b) are keywords. Used -/// to ensure that keyword-checking remains in sync with the definition of [`Token`]. +/// The subset of [`Token`]s which (a) do not have arguments and (b) are keywords. Used to ensure +/// that keyword-checking remains in sync with the definition of [`Token`]. #[derive(Debug, Copy, Clone, PartialEq, Eq, strum::Display, strum::EnumString)] #[strum(serialize_all = "SCREAMING-KEBAB-CASE")] pub enum KeywordToken { @@ -104,7 +104,8 @@ impl TryFrom<Token> for KeywordToken { fn try_from(token: Token) -> Result<Self, Self::Error> { // This match is explicit so that if you add a new [`Token`] constructor you have to decide - // if it's a keyword. Please do not add a top-level wildcard match here. + // if it's a keyword. + #[deny(clippy::wildcard_enum_match_arm, clippy::wildcard_in_or_patterns)] match token { Token::As => Ok(KeywordToken::As), Token::Matrix => Ok(KeywordToken::Matrix),