Commit 7117504

chore: respond to review from @jselig-rigetti
antalsz committed Jan 9, 2025
1 parent 6d682d7 commit 7117504
Showing 2 changed files with 17 additions and 6 deletions.
quil-rs/src/parser/lexer/mod.rs (13 additions & 3 deletions)
@@ -486,16 +486,26 @@ mod tests {
         case("a", vec![Token::Identifier("a".to_string())]),
         case("_a-2_b-2_", vec![Token::Identifier("_a-2_b-2_".to_string())]),
         case("a-2-%var", vec![
-            Token::Identifier("a-2".to_string()),
-            Token::Operator(Operator::Minus),
-            Token::Variable("var".to_string())
+            Token::Identifier("a-2".to_string()),
+            Token::Operator(Operator::Minus),
+            Token::Variable("var".to_string())
         ]),
         case("BIT", vec![Token::DataType(DataType::Bit)]),
         case("BITS", vec![Token::Identifier("BITS".to_string())]),
         case("NaN", vec![Token::Identifier("NaN".to_string())]),
         case("nan", vec![Token::Identifier("nan".to_string())]),
         case("NaNa", vec![Token::Identifier("NaNa".to_string())]),
         case("nana", vec![Token::Identifier("nana".to_string())]),
+        case("-NaN", vec![Token::Operator(Operator::Minus), Token::Identifier("NaN".to_string())]),
+        case("-inf", vec![Token::Operator(Operator::Minus), Token::Identifier("inf".to_string())]),
+        case("-Infinity", vec![
+            Token::Operator(Operator::Minus),
+            Token::Identifier("Infinity".to_string())
+        ]),
+        case("-inferior", vec![
+            Token::Operator(Operator::Minus),
+            Token::Identifier("inferior".to_string())
+        ]),
     )]
     fn it_lexes_identifier(input: &str, expected: Vec<Token>) {
         let input = LocatedSpan::new(input);
quil-rs/src/parser/token.rs (4 additions & 3 deletions)
@@ -67,8 +67,8 @@
     }
 }
 
-/// The subset of [`Token`]s which (a) do not contain more specific data and (b) are keywords. Used
-/// to ensure that keyword-checking remains in sync with the definition of [`Token`].
+/// The subset of [`Token`]s which (a) do not have arguments and (b) are keywords. Used to ensure
+/// that keyword-checking remains in sync with the definition of [`Token`].
 #[derive(Debug, Copy, Clone, PartialEq, Eq, strum::Display, strum::EnumString)]
 #[strum(serialize_all = "SCREAMING-KEBAB-CASE")]
 pub enum KeywordToken {
@@ -104,7 +104,8 @@ impl TryFrom<Token> for KeywordToken {
 
     fn try_from(token: Token) -> Result<Self, Self::Error> {
         // This match is explicit so that if you add a new [`Token`] constructor you have to decide
-        // if it's a keyword. Please do not add a top-level wildcard match here.
+        // if it's a keyword.
+        #[deny(clippy::wildcard_enum_match_arm, clippy::wildcard_in_or_patterns)]
         match token {
             Token::As => Ok(KeywordToken::As),
             Token::Matrix => Ok(KeywordToken::Matrix),
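For readers unfamiliar with the lint guard added in the second hunk: `clippy::wildcard_enum_match_arm` and `clippy::wildcard_in_or_patterns` forbid `_` arms (and or-patterns containing `_`) in the marked code, so the only way to make the `match` compile after a new `Token` variant is added is to write an explicit arm and decide whether it is a keyword. Below is a minimal, self-contained sketch of the same pattern, using illustrative enums that are not part of quil-rs and placing the `deny` on the function rather than the match expression.

```rust
// Sketch of the "explicit match, no wildcards" pattern.
// `Shape` and `Polygon` are stand-ins, not quil-rs types.
#[derive(Debug)]
enum Shape {
    Circle,
    Square,
    Triangle,
}

#[derive(Debug, PartialEq)]
enum Polygon {
    Square,
    Triangle,
}

// Denying these Clippy lints keeps anyone from "fixing" a missing arm with `_ =>`.
#[deny(clippy::wildcard_enum_match_arm, clippy::wildcard_in_or_patterns)]
fn try_into_polygon(shape: Shape) -> Result<Polygon, ()> {
    // No wildcard arm: if `Shape` grows a new variant, this match stops compiling
    // until someone decides how to handle it.
    match shape {
        Shape::Circle => Err(()),
        Shape::Square => Ok(Polygon::Square),
        Shape::Triangle => Ok(Polygon::Triangle),
    }
}

fn main() {
    assert_eq!(try_into_polygon(Shape::Square), Ok(Polygon::Square));
    assert_eq!(try_into_polygon(Shape::Triangle), Ok(Polygon::Triangle));
    assert!(try_into_polygon(Shape::Circle).is_err());
}
```

The Clippy lints only fire under `cargo clippy`; under plain `rustc`, it is the exhaustiveness check on the wildcard-free `match` that forces the update.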
