Skip to content

Commit

Permalink
Generate trait for predicates and actions
Browse files Browse the repository at this point in the history
  • Loading branch information
0x2a-42 committed Jun 19, 2024
1 parent 59c2b53 commit 4fcba1b
Show file tree
Hide file tree
Showing 9 changed files with 122 additions and 92 deletions.
123 changes: 63 additions & 60 deletions examples/c/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -481,7 +481,6 @@ pub fn tokenize(

include!(concat!(env!("OUT_DIR"), "/generated.rs"));

#[allow(clippy::ptr_arg)]
impl<'a> Parser<'a> {
fn check_missing_type_specifier(
&self,
Expand All @@ -507,6 +506,69 @@ impl<'a> Parser<'a> {
}
}

/// Returns the stored `is_type` flag for the identifier token at `pos`,
/// resolving it against the scope stack from the innermost scope outward.
/// Returns `false` if no scope declares the identifier.
fn is_type_name(&self, pos: usize) -> bool {
    let range = &self.cst.ranges[pos];
    let ident = &self.cst.source[range.start..range.end];
    // Innermost declaration wins: the first scope (scanning inside-out)
    // that knows this name decides whether it is a type.
    self.context
        .scopes
        .iter()
        .rev()
        .find_map(|scope| scope.declared_names.get(ident))
        .copied()
        .unwrap_or(false)
}

/// Decides whether the next token (lookahead 1) starts a type: either a
/// type-specifier/qualifier keyword, or an identifier that resolves to a
/// type name in the current scope stack (see `is_type_name`). Used by
/// grammar predicates that must disambiguate parenthesized constructs.
fn is_parenthesized_type(&self) -> bool {
    match self.peek(1) {
        // Any of these keywords unambiguously begins a type.
        Token::Alignas
        | Token::Atomic
        | Token::Attribute
        | Token::Bool
        | Token::Char
        | Token::Complex
        | Token::Const
        | Token::Double
        | Token::Enum
        | Token::Float
        | Token::Imaginary
        | Token::Int
        | Token::Int128
        | Token::Long
        | Token::Restrict
        | Token::Short
        | Token::Signed
        | Token::Struct
        | Token::TypeOf
        | Token::Union
        | Token::Unsigned
        | Token::Void
        | Token::Volatile => true,
        // An identifier begins a type only if it was declared as a type
        // name. Locate the token index of the lookahead by taking the
        // second non-skipped token at or after the current position.
        Token::Identifier => self.cst.tokens[self.pos..]
            .iter()
            .enumerate()
            .filter(|(_, tok)| !Self::is_skipped(**tok))
            .map(|(i, _)| i)
            .nth(1)
            .map_or(false, |i| self.is_type_name(self.pos + i)),
        _ => false,
    }
}

}

#[allow(clippy::ptr_arg)]
impl<'a> PredicatesAndActions for Parser<'a> {
fn build(&mut self, rule: Rule, node: NodeRef, diags: &mut Vec<Diagnostic>) {
match rule {
Rule::Declaration => {
Expand Down Expand Up @@ -583,65 +645,6 @@ impl<'a> Parser<'a> {
}
}

/// Looks up the identifier token at `pos` in the scope stack (innermost
/// scope first) and reports whether it was declared as a type name.
/// An undeclared identifier yields `false`.
fn is_type_name(&self, pos: usize) -> bool {
    let range = &self.cst.ranges[pos];
    let ident = &self.cst.source[range.start..range.end];
    // Scan scopes inside-out; the first declaration found shadows any
    // declaration in an enclosing scope.
    self.context
        .scopes
        .iter()
        .rev()
        .find_map(|scope| scope.declared_names.get(ident))
        .copied()
        .unwrap_or(false)
}

/// Reports whether the token at lookahead 1 starts a type — either one
/// of the type-specifier/qualifier keywords, or an identifier that the
/// scope stack records as a type name (see `is_type_name`).
fn is_parenthesized_type(&self) -> bool {
    match self.peek(1) {
        // These keywords always introduce a type.
        Token::Alignas
        | Token::Atomic
        | Token::Attribute
        | Token::Bool
        | Token::Char
        | Token::Complex
        | Token::Const
        | Token::Double
        | Token::Enum
        | Token::Float
        | Token::Imaginary
        | Token::Int
        | Token::Int128
        | Token::Long
        | Token::Restrict
        | Token::Short
        | Token::Signed
        | Token::Struct
        | Token::TypeOf
        | Token::Union
        | Token::Unsigned
        | Token::Void
        | Token::Volatile => true,
        // For an identifier, find its absolute token index — the second
        // non-skipped token from the current position — and consult the
        // declared names.
        Token::Identifier => self.cst.tokens[self.pos..]
            .iter()
            .enumerate()
            .filter(|(_, tok)| !Self::is_skipped(**tok))
            .map(|(i, _)| i)
            .nth(1)
            .map_or(false, |i| self.is_type_name(self.pos + i)),
        _ => false,
    }
}

/// Grammar predicate for the first ambiguity in `postfix_expr`:
/// delegates to `is_parenthesized_type` to decide whether the upcoming
/// parenthesized tokens form a type name.
fn predicate_postfix_expr_1(&self) -> bool {
self.is_parenthesized_type()
}
Expand Down
5 changes: 1 addition & 4 deletions examples/calc/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,4 @@ pub fn tokenize(

include!(concat!(env!("OUT_DIR"), "/generated.rs"));

impl Parser<'_> {
#[allow(clippy::ptr_arg)]
fn build(&mut self, _rule: Rule, _node: NodeRef, _diags: &mut Vec<Diagnostic>) {}
}
impl PredicatesAndActions for Parser<'_> {}
5 changes: 1 addition & 4 deletions examples/json/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -189,7 +189,4 @@ pub fn tokenize(

include!(concat!(env!("OUT_DIR"), "/generated.rs"));

impl Parser<'_> {
#[allow(clippy::ptr_arg)]
fn build(&mut self, _rule: Rule, _node: NodeRef, _diags: &mut Vec<Diagnostic>) {}
}
impl PredicatesAndActions for Parser<'_> {}
4 changes: 1 addition & 3 deletions examples/l/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -107,9 +107,7 @@ pub fn tokenize(
(tokens, ranges)
}

impl<'a> Parser<'a> {
#[allow(clippy::ptr_arg)]
fn build(&mut self, _rule: Rule, _node: NodeRef, _diags: &mut Vec<Diagnostic>) {}
impl<'a> PredicatesAndActions for Parser<'a> {
/// Grammar predicate for `param_list`: the list continues only while the
/// next token is not the closing parenthesis.
fn predicate_param_list_1(&self) -> bool {
self.peek(1) != Token::RPar
}
Expand Down
2 changes: 1 addition & 1 deletion examples/lua/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -260,7 +260,7 @@ pub fn tokenize(

include!(concat!(env!("OUT_DIR"), "/generated.rs"));

impl<'a> Parser<'a> {
impl<'a> PredicatesAndActions for Parser<'a> {
fn build(&mut self, rule: Rule, node: NodeRef, diags: &mut Vec<Diagnostic>) {
match rule {
Rule::Expstat => {
Expand Down
5 changes: 1 addition & 4 deletions examples/oberon0/src/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,4 @@ pub fn tokenize(

include!(concat!(env!("OUT_DIR"), "/generated.rs"));

impl Parser<'_> {
#[allow(clippy::ptr_arg)]
fn build(&mut self, _rule: Rule, _node: NodeRef, _diags: &mut Vec<Diagnostic>) {}
}
impl PredicatesAndActions for Parser<'_> {}
59 changes: 47 additions & 12 deletions src/backend/rust.rs
Original file line number Diff line number Diff line change
Expand Up @@ -104,26 +104,59 @@ impl RustOutput {

parser_file.write_all(template.as_bytes())?;

parser_file.write_all(
b"impl<'a> Parser<'a> {\
\n fn build(&mut self, rule: Rule, node: NodeRef, diags: &mut Vec<Diagnostic>) {}\n",
)?;
Self::output_predicates_and_actions(&mut parser_file, sema, false)
}

fn output_predicates_and_actions(
output: &mut std::fs::File,
sema: &SemanticData,
is_trait: bool,
) -> std::io::Result<()> {
output.write_all(if is_trait {
b"trait PredicatesAndActions {\
\n /// Called when a new syntax tree node is created\
\n #[allow(clippy::ptr_arg)]\
\n fn build(&mut self, _rule: Rule, _node: NodeRef, _diags: &mut Vec<Diagnostic>) {}\n"
} else {
b"impl<'a> PredicatesAndActions for Parser<'a> {\n"
})?;
let mut predicates = HashSet::new();
for (rule, num) in sema.predicates.values() {
parser_file.write_all(
if predicates.contains(&(rule, num)) {
continue;
}
predicates.insert((rule, num));
output.write_all(
format!(
" fn predicate_{rule}_{num}(&self) -> bool {{\n todo!()\n }}\n"
" fn predicate_{rule}_{num}(&self) -> bool{}\n",
if is_trait {
";"
} else {
" {\n todo!()\n }"
}
)
.as_bytes(),
)?;
}
let mut actions = HashSet::new();
for (rule, num) in sema.actions.values() {
parser_file.write_all(
format!(" fn action_{rule}_{num}(&mut self, diags: &mut Vec<Diagnostic>) {{\n todo!()\n }}\n")
.as_bytes(),
if actions.contains(&(rule, num)) {
continue;
}
actions.insert((rule, num));
output.write_all(
format!(
" fn action_{rule}_{num}(&mut self, diags: &mut Vec<Diagnostic>){}\n",
if is_trait {
";"
} else {
" {\n todo!()\n }"
}
)
.as_bytes(),
)?;
}
parser_file.write_all(b"}\n")?;
Ok(())
output.write_all(b"}\n")
}

fn output_node_kind_decl(
Expand Down Expand Up @@ -1005,6 +1038,8 @@ impl RustOutput {
for rule in file.rule_decls(cst) {
Self::output_rule(cst, sema, rule, output, &token_symbols)?;
}
output.write_all(b"}\n")
output.write_all(b"}\n\n")?;

Self::output_predicates_and_actions(output, sema, true)
}
}
6 changes: 6 additions & 0 deletions src/frontend/generated.rs
Original file line number Diff line number Diff line change
Expand Up @@ -814,3 +814,9 @@ impl<'a> Parser<'a> {
self.close(m, Rule::Atomic, diags);
}
}

/// Hook trait the parser implements to supply grammar predicates and
/// semantic actions. `build` has a default no-op implementation, so a
/// parser with no custom behavior can provide an empty impl.
trait PredicatesAndActions {
/// Called when a new syntax tree node is created
// NOTE(review): ptr_arg is allowed because implementors may leave the
// `&mut Vec<Diagnostic>` parameter unused — confirm against clippy output.
#[allow(clippy::ptr_arg)]
fn build(&mut self, _rule: Rule, _node: NodeRef, _diags: &mut Vec<Diagnostic>) {}
}
5 changes: 1 addition & 4 deletions src/frontend/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,4 @@ pub fn tokenize(

include!("./generated.rs");

impl Parser<'_> {
#[allow(clippy::ptr_arg)]
fn build(&mut self, _rule: Rule, _node: NodeRef, _diags: &mut Vec<Diagnostic>) {}
}
impl PredicatesAndActions for Parser<'_> {}

0 comments on commit 4fcba1b

Please sign in to comment.