minor update
QuarticCat committed Jul 25, 2023
1 parent 6386241, commit 4e87b65
Showing 11 changed files with 67 additions and 42 deletions.
pag-parser/src/frontend/mod.rs (4 changes: 3 additions & 1 deletion)
@@ -493,7 +493,9 @@ mod test {
         dbg!(size_of::<NormalForm>());
         let pairs = GrammarParser::parse(Rule::grammar, TEST).unwrap();
         let tree = parse_surface_syntax(pairs, &PRATT_PARSER, TEST).unwrap();
-        let Grammar { lexer, parser } = &tree.node else { unreachable!() };
+        let Grammar { lexer, parser } = &tree.node else {
+            unreachable!()
+        };
 
         println!("\n---------< construct lexer database >----------");
         let database = construct_lexer_database(lexer).unwrap();

pag-parser/src/frontend/syntax.rs (2 changes: 1 addition & 1 deletion)
@@ -71,7 +71,7 @@ pub fn construct_parser<'src, 'arena>(
     };
     let mut errs = Vec::new();
     for rule in rules {
-        let ParserRuleDef { active, name, expr, } = &rule.node else {
+        let ParserRuleDef { active, name, expr } = &rule.node else {
             unreachable_branch!("parser should only contain rule definitions")
         };
         match construct_core_syntax_tree(&parser, expr) {

pag-parser/src/fusion.rs (4 changes: 3 additions & 1 deletion)
@@ -199,7 +199,9 @@ fn generate_children<'src>(
         .iter()
         .filter(|x| !matches!(x, NormalForm::Empty(..)))
         .map(|nf| {
-            let NormalForm::Sequence { nonterminals, .. } = nf else { unreachable!() };
+            let NormalForm::Sequence { nonterminals, .. } = nf else {
+                unreachable!()
+            };
 
             let mut add_continue = false;
             let mut actions = Vec::new();

pag-parser/src/nf.rs (13 changes: 10 additions & 3 deletions)
@@ -297,7 +297,10 @@ pub fn merge_inactive_rules<'src, 'nf>(
             let NormalForm::Sequence {
                 terminal,
                 nonterminals,
-            } = j else { continue };
+            } = j
+            else {
+                continue;
+            };
             if nonterminals.contains(&Action::Subroutine(tag)) {
                 *j = &*arena.alloc(NormalForm::Sequence {
                     terminal: *terminal,
@@ -328,9 +331,13 @@ pub fn remove_unreachable_rules<'src>(nfs: &mut NormalForms<'src, '_>, parser: &
             return;
         }
         visited.insert(current);
-        let Some(tag) = nfs.entries.get(&current) else { return };
+        let Some(tag) = nfs.entries.get(&current) else {
+            return;
+        };
         for i in tag {
-            let NormalForm::Sequence { nonterminals, .. } = i else { continue };
+            let NormalForm::Sequence { nonterminals, .. } = i else {
+                continue;
+            };
             for i in nonterminals {
                 let Action::Subroutine(x) = i else { continue };
                 dfs(nfs, *x, visited);

pag-parser/src/type_system/fixpoint.rs (4 changes: 3 additions & 1 deletion)
@@ -39,7 +39,9 @@ fn find_neighbors(
         Term::Fix(_, expr) => find_neighbors(expr, neighbors, sym_to_id),
         Term::ParserRef(symbol) => {
             // unexisted IDs refer to implicit fixpoints
-            let Some(&id) = sym_to_id.get(symbol) else { return };
+            let Some(&id) = sym_to_id.get(symbol) else {
+                return;
+            };
             neighbors.push(id);
         }
         _ => {}

pag-parser/src/utilities.rs (4 changes: 3 additions & 1 deletion)
@@ -43,7 +43,9 @@ fn is_ascii_ident_head(x: &u8) -> bool {
 }
 
 fn is_ascii_ident(s: &str) -> bool {
-    let [x, xs@..] = s.as_bytes() else { return false };
+    let [x, xs @ ..] = s.as_bytes() else {
+        return false;
+    };
     is_ascii_ident_head(x) && xs.iter().all(is_ascii_ident_body)
 }
 

pag-parser2/Cargo.toml (4 changes: 2 additions & 2 deletions)
@@ -23,5 +23,5 @@ authors.workspace = true
 readme.workspace = true
 
 [dependencies]
-syn = "2.0.27"
-quote = "1.0.9"
+syn = { version = "2.0.27", features = ["full"] }
+quote = "1.0.9"

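Note: enabling syn's "full" feature is plausibly needed because the new frontend keeps semantic actions as syn::Block (ParserRule::action in ast.rs below) and parses them in parse.rs, and syn only provides Block when "full" is enabled. A minimal, self-contained sketch of that dependency; the Action wrapper and the main function are illustrative, not part of this commit:

// Requires syn = { version = "2", features = ["full"] }: `syn::Block` is gated behind "full".
use syn::parse::{Parse, ParseStream};

// A tiny stand-in for `ParserRule::action` from ast.rs.
struct Action {
    block: syn::Block,
}

impl Parse for Action {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        // Parsing a `{ ... }` block only compiles when the "full" feature is enabled.
        Ok(Self {
            block: input.parse::<syn::Block>()?,
        })
    }
}

fn main() {
    let action: Action = syn::parse_str("{ x + 1 }").unwrap();
    assert_eq!(action.block.stmts.len(), 1);
}
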
pag-parser2/src/frontend/ast.rs (16 changes: 8 additions & 8 deletions)
@@ -10,8 +10,8 @@ use std::collections::HashMap;
 
 pub struct Ast {
     pub entry: syn::Ident,
-    pub skip: Option<LexerTree>,
-    pub lexer_map: HashMap<syn::Ident, LexerTree>,
+    pub skip: Option<LexerExpr>,
+    pub lexer_map: HashMap<syn::Ident, LexerExpr>,
     pub parser_map: HashMap<syn::Ident, ParserDef>,
 }
 
@@ -21,18 +21,18 @@ pub struct ParserDef {
 }
 
 pub struct ParserRule {
-    pub bindings: Vec<ParserBinding>,
+    pub vars: Vec<VarBinding>,
     pub action: Option<syn::Block>,
 }
 
-pub struct ParserBinding {
+pub struct VarBinding {
     pub name: Option<syn::Ident>,
     pub ty: Option<syn::Type>,
-    pub tree: ParserTree,
+    pub expr: ParserExpr,
 }
 
-// TODO: how to express "bottom" & "empty"?
-pub enum LexerTree {
+// TODO: how to express "bottom" & "any"?
+pub enum LexerExpr {
     Alt(Vec<Self>),
     Seq(Vec<Self>),
     And(Vec<Self>),
@@ -46,7 +46,7 @@ pub enum LexerTree {
 }
 
 // TODO: how to express "select" & "ignore"?
-pub enum ParserTree {
+pub enum ParserExpr {
     Seq(Vec<Self>),
     Star(Box<Self>),
     Plus(Box<Self>),

pag-parser2/src/frontend/parse.rs (42 changes: 18 additions & 24 deletions)
@@ -9,7 +9,6 @@
 use super::ast::*;
 
 use syn::parse::{Parse, ParseStream};
-use syn::punctuated::Punctuated;
 use syn::{parse_quote, Token};
 
 use std::collections::HashMap;
@@ -50,7 +49,7 @@ impl Parse for Ast {
                 }
                 "skip" => {
                     input.parse::<Token![=]>()?;
-                    skip = Some(input.parse::<LexerTree>()?);
+                    skip = Some(input.parse::<LexerExpr>()?);
                 }
                 _ => return Err(syn::Error::new(ident.span(), "invalid keyword")),
             }
@@ -60,7 +59,7 @@ impl Parse for Ast {
             match ident_kind(&ident) {
                 IdentKind::LexerName => {
                     input.parse::<Token![=]>()?;
-                    lexer_map.insert(ident, input.parse::<LexerTree>()?);
+                    lexer_map.insert(ident, input.parse::<LexerExpr>()?);
                 }
                 IdentKind::ParserName => {
                     parser_map.insert(ident, input.parse::<ParserDef>()?);
Expand Down Expand Up @@ -90,19 +89,14 @@ impl Parse for ParserDef {

input.parse::<Token![=]>()?;

// let mut rules = Vec::new();
// loop {
// rules.push(input.parse::<ParserRule>()?);
// if !input.peek(Token![|]) {
// break;
// }
// input.parse::<Token![|]>();
// }

// TODO: check whether this is in-place
let rules = Punctuated::<ParserRule, Token![|]>::parse_separated_nonempty(input)?
.into_iter()
.collect();
let mut rules = Vec::new();
loop {
rules.push(input.parse::<ParserRule>()?);
if !input.peek(Token![|]) {
break;
}
input.parse::<Token![|]>()?;
}

Ok(Self { ty, rules })
}
@@ -111,21 +105,21 @@ impl Parse for ParserDef {
 impl Parse for ParserRule {
     // (ParserBinding)+ syn::Block?
     fn parse(input: ParseStream) -> syn::Result<Self> {
-        let mut bindings = Vec::new();
+        let mut vars = Vec::new();
         while !input.peek(syn::token::Brace) && !input.peek(Token![|]) && !input.peek(Token![;]) {
-            bindings.push(input.parse::<ParserBinding>()?);
+            vars.push(input.parse::<VarBinding>()?);
         }
 
         let mut action = None;
         if input.peek(syn::token::Brace) {
             action = Some(input.parse::<syn::Block>()?);
         }
 
-        Ok(Self { bindings, action })
+        Ok(Self { vars, action })
     }
 }
 
-impl Parse for ParserBinding {
+impl Parse for VarBinding {
     // ("$" syn::Ident ("<" syn::Type ">")? ":")? ParserTree
     fn parse(input: ParseStream) -> syn::Result<Self> {
         let mut name = None;
@@ -144,20 +138,20 @@ impl Parse for ParserBinding {
             input.parse::<Token![:]>()?;
         }
 
-        let tree = input.parse::<ParserTree>()?;
+        let expr = input.parse::<ParserExpr>()?;
 
-        Ok(Self { name, ty, tree })
+        Ok(Self { name, ty, expr })
     }
 }
 
-impl Parse for LexerTree {
+impl Parse for LexerExpr {
     // pratt parsing
     fn parse(_input: ParseStream) -> syn::Result<Self> {
         todo!()
     }
 }
 
-impl Parse for ParserTree {
+impl Parse for ParserExpr {
     // pratt parsing
     fn parse(_input: ParseStream) -> syn::Result<Self> {
         todo!()

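For reference, a minimal, self-contained sketch of the `|`-separated parsing loop that replaces the Punctuated call in ParserDef::parse above. The toy Alternatives type and the main function are illustrative only, not part of this commit; the loop body mirrors the new code:

use syn::parse::{Parse, ParseStream};
use syn::Token;

// Toy item type: a list of identifiers separated by `|`, e.g. `foo | bar | baz`.
struct Alternatives(Vec<syn::Ident>);

impl Parse for Alternatives {
    fn parse(input: ParseStream) -> syn::Result<Self> {
        let mut items = Vec::new();
        loop {
            items.push(input.parse::<syn::Ident>()?);
            // Stop at the first token that is not a `|` separator (e.g. `;`, `{`, or end of input).
            if !input.peek(Token![|]) {
                break;
            }
            input.parse::<Token![|]>()?;
        }
        Ok(Self(items))
    }
}

fn main() {
    let parsed: Alternatives = syn::parse_str("foo | bar | baz").unwrap();
    assert_eq!(parsed.0.len(), 3);
}
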
pag-parser2/src/nf/mod.rs (8 changes: 8 additions & 0 deletions)
@@ -1,3 +1,11 @@
+// Copyright (c) 2023 Paguroidea Developers
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
 use quote::format_ident;
 use syn::Ident;
 mod semact;

pag-parser2/src/nf/semact.rs (8 changes: 8 additions & 0 deletions)
@@ -1,3 +1,11 @@
+// Copyright (c) 2023 Paguroidea Developers
+//
+// Licensed under the Apache License, Version 2.0
+// <LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0> or the MIT
+// license <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. All files in the project carrying such notice may not be copied,
+// modified, or distributed except according to those terms.
+
 use std::collections::HashMap;
 
 use quote::format_ident;
