fix(glsl-lang-quote): correctly parse #(ident) at line start #49

Merged: 4 commits, merged on Jul 23, 2024. Changes from all commits are shown below.
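
For context, glsl-lang-quote's quoting macros interpolate Rust expressions into quoted GLSL with the `#(expr)` syntax. Because `#` at the start of a line normally begins a preprocessor directive, an interpolation in that position previously failed to parse, as the changes to `if_section_or_control_line` below suggest. A minimal sketch of the now-accepted pattern, adapted from the `statement_var_decl` test added in this PR (the imports and macro path are a best guess; they are not shown in the diff):

```rust
use glsl_lang::ast;
use glsl_lang_quote::glsl_statement;

fn main() {
    // Build AST expressions to splice into the quoted GLSL.
    let current_mat: ast::Expr =
        ast::ExprData::Variable(ast::IdentifierData("m".into()).into()).into();
    let work_var: ast::Expr =
        ast::ExprData::Variable(ast::IdentifierData("work_var".into()).into()).into();

    // The second interpolation starts its line; before this fix it was
    // misread as a preprocessor directive and quoting failed.
    let _stmt = glsl_statement! {
        {
            mat3 mt = #(current_mat);
            #(work_var.clone()) = mt * #(work_var);
        }
    };
}
```
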
1 change: 1 addition & 0 deletions Cargo.toml

@@ -1,4 +1,5 @@
 [workspace]
+resolver = "2"
 default-members = [
     "lang",
     "lang-pp",

37 changes: 37 additions & 0 deletions lang-lexer/src/lib.rs

@@ -52,3 +52,40 @@ pub trait LangLexerIterator:
         err: lalrpop_util::ParseError<LexerPosition, Token, Self::Error>,
     ) -> lang_util::error::ParseError<Self::Error>;
 }
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    const HASH_IDENT_TEST_CASE: &str = "# (ident) = hello";
+
+    fn test_hash_ident_with_lexer<'i>(lexer: impl LangLexer<'i, Input = &'i str>) {
+        let tokens: Vec<_> = lexer.run(ParseContext::default()).collect();
+        eprintln!("{:#?}", tokens);
+        assert!(tokens.len() > 1);
+    }
+
+    #[cfg(feature = "v2-min")]
+    #[test]
+    fn test_hash_ident_v2_min() {
+        test_hash_ident_with_lexer(v2_min::str::Lexer::new(
+            HASH_IDENT_TEST_CASE,
+            &ParseOptions {
+                allow_rs_ident: true,
+                ..Default::default()
+            },
+        ));
+    }
+
+    #[cfg(feature = "v2-full")]
+    #[test]
+    fn test_hash_ident_v2_full() {
+        test_hash_ident_with_lexer(v2_full::str::Lexer::new(
+            HASH_IDENT_TEST_CASE,
+            &ParseOptions {
+                allow_rs_ident: true,
+                ..Default::default()
+            },
+        ));
+    }
+}
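
Note that this smoke test only asserts that lexing `# (ident) = hello` with `allow_rs_ident` enabled yields more than one token: before the fix, an interpolation at the start of the input was apparently consumed as a single invalid preprocessor directive rather than lexed into individual tokens.
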
125 changes: 86 additions & 39 deletions lang-pp/src/last.rs

@@ -338,7 +338,7 @@ mod tests {
     use lang_util::FileId;
     use rowan::NodeOrToken;
 
-    use crate::processor::event::DirectiveKind;
+    use crate::{processor::event::DirectiveKind, types::Token};
 
     use super::{Event, MaybeToken};
 
@@ -370,52 +370,99 @@ mod tests {
     fn test_hash_ident() {
         use super::Token::*;
 
-        let mut tokens = Vec::new();
-
-        let mut tokenizer = crate::processor::str::process(
-            "#(ident) = hello",
-            crate::processor::ProcessorState::default(),
-        )
-        .tokenize(100, false, &crate::exts::DEFAULT_REGISTRY);
-
-        #[allow(clippy::while_let_on_iterator)]
-        while let Some(result) = tokenizer.next() {
-            if let Ok(event) = result {
-                match event {
-                    Event::Token { token_kind, .. } => {
-                        tokens.push(token_kind);
-                    }
-                    Event::Directive { directive, .. }
-                        if matches!(directive.kind(), DirectiveKind::Invalid(_)) =>
-                    {
-                        // Extract the tokens from the directive parse tree
-                        tokens.extend(
-                            directive
-                                .node
-                                .descendants_with_tokens()
-                                .filter_map(NodeOrToken::into_token)
-                                .map(|token| {
-                                    tokenizer.tokenize_single(&(&token, FileId::default())).0
-                                }),
-                        );
-                    }
-                    _ => {}
-                }
-            }
-        }
-
-        assert_eq!(
-            &tokens,
-            &[
-                HASH,
-                LPAREN,
-                IDENT("ident".into()),
-                RPAREN,
-                WS,
-                EQUAL,
-                WS,
-                IDENT("hello".into())
-            ]
-        );
+        let inputs = [
+            "#(ident) = hello",
+            "# (ident) = hello",
+            "#(ident.clone()) = hello",
+            "# (ident.clone()) = hello",
+        ];
+        let outputs: [&[Token]; 4] = [
+            &[
+                HASH,
+                LPAREN,
+                IDENT("ident".into()),
+                RPAREN,
+                WS,
+                EQUAL,
+                WS,
+                IDENT("hello".into()),
+            ],
+            &[
+                HASH,
+                WS,
+                LPAREN,
+                IDENT("ident".into()),
+                RPAREN,
+                WS,
+                EQUAL,
+                WS,
+                IDENT("hello".into()),
+            ],
+            &[
+                HASH,
+                LPAREN,
+                IDENT("ident".into()),
+                PERIOD,
+                IDENT("clone".into()),
+                LPAREN,
+                RPAREN,
+                RPAREN,
+                WS,
+                EQUAL,
+                WS,
+                IDENT("hello".into()),
+            ],
+            &[
+                HASH,
+                WS,
+                LPAREN,
+                IDENT("ident".into()),
+                PERIOD,
+                IDENT("clone".into()),
+                LPAREN,
+                RPAREN,
+                RPAREN,
+                WS,
+                EQUAL,
+                WS,
+                IDENT("hello".into()),
+            ],
+        ];
+
+        for (input, output) in inputs.iter().zip(outputs.iter()) {
+            let mut tokens = Vec::new();
+
+            let mut tokenizer =
+                crate::processor::str::process(input, crate::processor::ProcessorState::default())
+                    .tokenize(100, false, &crate::exts::DEFAULT_REGISTRY);
+
+            #[allow(clippy::while_let_on_iterator)]
+            while let Some(result) = tokenizer.next() {
+                if let Ok(event) = result {
+                    match event {
+                        Event::Token { token_kind, .. } => {
+                            tokens.push(token_kind);
+                        }
+                        Event::Directive { directive, .. }
+                            if matches!(directive.kind(), DirectiveKind::Invalid(_)) =>
+                        {
+                            // Extract the tokens from the directive parse tree
+                            tokens.extend(
+                                directive
+                                    .node
+                                    .descendants_with_tokens()
+                                    .filter_map(NodeOrToken::into_token)
+                                    .map(|token| {
+                                        tokenizer.tokenize_single(&(&token, FileId::default())).0
+                                    }),
+                            );
+                        }
+                        _ => {}
+                    }
+                }
+            }
+
+            assert_eq!(&tokens, output);
+        }
     }
 }
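
The four inputs cover an interpolation with and without whitespace after the `#`, each in a plain form and with a nested call (`.clone()`), which exercises the balanced-parenthesis matching added in `syntax.rs` below.
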
49 changes: 49 additions & 0 deletions lang-pp/src/parser/syntax.rs

@@ -149,6 +149,19 @@ fn if_section_or_control_line(parser: &mut ParserRun) {
             pp_extension(parser);
             Some(PP_EXTENSION)
         }
+        "(" => {
+            parser.bump();
+            match pp_rs_ident(parser, token) {
+                Ok(()) => {
+                    // Abort parsing a control directive, we parsed an rs_ident
+                    return;
+                }
+                Err(err) => {
+                    error = err.into();
+                }
+            }
+            None
+        }
         other => {
             error = Some((
                 ErrorKind::UnknownPreprocessorDirective { name: other.into() },
@@ -231,6 +244,42 @@
     }
 }
 
+fn pp_rs_ident(
+    parser: &mut ParserRun,
+    token: lexer::TextToken,
+) -> Result<(), (ErrorKind, TextRange)> {
+    let mut level = 1;
+    while level > 0 {
+        // Skip whitespace
+        parser.skip_trivia();
+
+        let peeked = parser.peek().map(|tt| SmolStr::from(parser.text(tt)));
+        match peeked.as_deref() {
+            Some("(") => {
+                level += 1;
+                parser.bump();
+            }
+            Some(")") => {
+                level -= 1;
+                parser.bump();
+            }
+            Some(_) => {
+                parser.bump();
+            }
+            None => {
+                return Err((
+                    ErrorKind::EndOfInput {
+                        expected: Box::new([lexer::Token::RPAREN]),
+                    },
+                    token.range,
+                ));
+            }
+        }
+    }
+
+    Ok(())
+}
+
 fn pp_include(parser: &mut ParserRun) {
     // We're about to parse a path
     parser.input.set_expect_angle_string(true);
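
The new `pp_rs_ident` helper consumes everything up to the parenthesis matching the one that followed `#`, tracking nesting depth so interpolations like `#(ident.clone())` survive intact. A standalone sketch of the same scan over a plain token list (all names here are hypothetical, not part of the crate's API):

```rust
/// Minimal sketch: given the tokens after `#(`, return how many tokens
/// belong to the interpolation, or an error if input ends early.
fn scan_rs_ident(tokens: &[&str]) -> Result<usize, &'static str> {
    let mut level = 1; // the opening "(" was already consumed
    for (consumed, tok) in tokens.iter().enumerate() {
        match *tok {
            "(" => level += 1,
            ")" => {
                level -= 1;
                if level == 0 {
                    // `consumed + 1` tokens, including the matching ")"
                    return Ok(consumed + 1);
                }
            }
            _ => {} // everything else is part of the Rust expression
        }
    }
    Err("end of input, expected `)`")
}

fn main() {
    // "#(ident.clone()) = hello", after the opening paren:
    let rest = ["ident", ".", "clone", "(", ")", ")", "=", "hello"];
    assert_eq!(scan_rs_ident(&rest), Ok(6)); // up to and including the final ")"
}
```
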
14 changes: 14 additions & 0 deletions lang-pp/src/parser/tests.rs

@@ -267,3 +267,17 @@ fn test_eof() {
         "##]],
     );
 }
+
+#[test]
+fn test_hash_ident() {
+    check(
+        parse("#(ident)"),
+        expect![[r##"
+            ROOT@0..8
+              HASH@0..1 "#"
+              LPAREN@1..2 "("
+              IDENT@2..7 "ident"
+              RPAREN@7..8 ")"
"##]],
);
}
66 changes: 31 additions & 35 deletions lang-pp/src/processor.rs

@@ -208,42 +208,38 @@ impl<'r> ProcessorStateBuilder<'r> {
     pub fn finish(self) -> ProcessorState {
         let one = DefineObject::one();
 
-        let mut state = ProcessorState {
-            // No #include extensions enabled
-            include_mode: IncludeMode::None,
-            // Spec 3.3, "There is a built-in macro definition for each profile the implementation
-            // supports. All implementations provide the following macro:
-            // `#define GL_core_profile 1`
-            definitions: self
-                .core_profile
-                .then(|| Define::object("GL_core_profile".into(), one.clone(), true))
-                .into_iter()
-                .chain(
-                    self.compatibility_profile
-                        .then(|| {
-                            Define::object("GL_compatibility_profile".into(), one.clone(), true)
-                        })
-                        .into_iter(),
-                )
-                .chain(
-                    self.es_profile
-                        .then(|| Define::object("GL_es_profile".into(), one.clone(), true))
-                        .into_iter(),
-                )
-                .chain(self.definitions.into_iter())
-                .map(|definition| Definition::Regular(definition.into(), FileId::default()))
-                .chain([Definition::Line, Definition::File, Definition::Version].into_iter())
-                .chain(self.registry.all().map(|spec| {
-                    Definition::Regular(
-                        Define::object(spec.name().as_ref().into(), one.clone(), true).into(),
-                        FileId::default(),
-                    )
-                }))
-                .map(|definition| (definition.name().into(), definition))
-                .collect(),
-            version: Version::default(),
-            cpp_style_line: false,
-        };
+        let mut state =
+            ProcessorState {
+                // No #include extensions enabled
+                include_mode: IncludeMode::None,
+                // Spec 3.3, "There is a built-in macro definition for each profile the implementation
+                // supports. All implementations provide the following macro:
+                // `#define GL_core_profile 1`
+                definitions: self
+                    .core_profile
+                    .then(|| Define::object("GL_core_profile".into(), one.clone(), true))
+                    .into_iter()
+                    .chain(self.compatibility_profile.then(|| {
+                        Define::object("GL_compatibility_profile".into(), one.clone(), true)
+                    }))
+                    .chain(
+                        self.es_profile
+                            .then(|| Define::object("GL_es_profile".into(), one.clone(), true)),
+                    )
+                    .chain(self.definitions)
+                    .map(|definition| Definition::Regular(definition.into(), FileId::default()))
+                    .chain([Definition::Line, Definition::File, Definition::Version])
+                    .chain(self.registry.all().map(|spec| {
+                        Definition::Regular(
+                            Define::object(spec.name().as_ref().into(), one.clone(), true).into(),
+                            FileId::default(),
+                        )
+                    }))
+                    .map(|definition| (definition.name().into(), definition))
+                    .collect(),
+                version: Version::default(),
+                cpp_style_line: false,
+            };
 
         for (name, behavior) in self.extensions {
             state.add_extension(&name, behavior);
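
Aside from the reformatting, the substantive change here is dropping the redundant `.into_iter()` calls: `Iterator::chain` accepts any `IntoIterator`, so passing the values directly is equivalent. This looks like cleanup prompted by the newer toolchain's clippy (see the bump to 1.74.0 below) rather than part of the parser fix itself.
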
3 changes: 1 addition & 2 deletions lang-pp/src/processor/nodes.rs

@@ -292,8 +292,7 @@ impl VersionProfile {
 
 impl PartialOrd for VersionProfile {
     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
-        self.as_feature_set_size_index()
-            .partial_cmp(&other.as_feature_set_size_index())
+        Some(self.cmp(other))
     }
 }
 
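
This swaps a manual `partial_cmp` over `as_feature_set_size_index()` for delegation to the type's `Ord` impl, the canonical form (and the one recent clippy versions lint toward) whenever a type implements both traits, since it keeps the two orderings consistent by construction. A generic illustration of the pattern (the type here is made up for the example):

```rust
use std::cmp::Ordering;

// Illustrative only: when a type implements `Ord`, the canonical
// `PartialOrd` impl just delegates to it.
#[derive(PartialEq, Eq)]
struct FeatureLevel(u8);

impl Ord for FeatureLevel {
    fn cmp(&self, other: &Self) -> Ordering {
        self.0.cmp(&other.0)
    }
}

impl PartialOrd for FeatureLevel {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other)) // same shape as the VersionProfile fix above
    }
}
```
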
18 changes: 18 additions & 0 deletions lang-quote/tests/lib.rs

@@ -107,3 +107,21 @@ fn layout_qualifier() {
         layout(std140) struct Q { float f; };
     };
 }
+
+#[test]
+fn statement_var_decl() {
+    let current_mat: ast::Expr =
+        ast::ExprData::Variable(ast::IdentifierData("main".into()).into()).into();
+    let work_var: ast::Expr =
+        ast::ExprData::Variable(ast::IdentifierData("work_var".into()).into()).into();
+    let work_var_ps: ast::Expr =
+        ast::ExprData::Variable(ast::IdentifierData("work_var_ps".into()).into()).into();
+
+    let _ = glsl_statement! {
+        {
+            mat3 mt = #(current_mat);
+            #(work_var.clone()) = mt * #(work_var);
+            #(work_var_ps.clone()) = mt * #(work_var_ps);
+        }
+    };
+}
2 changes: 1 addition & 1 deletion rust-toolchain.toml

@@ -1,4 +1,4 @@
 [toolchain]
-channel = "1.70.0"
+channel = "1.74.0"
 components = ["rustfmt", "clippy"]
 profile = "minimal"