Tokenizer: remove path_escape
ehaas committed Oct 21, 2023
1 parent e60d592 commit 37c8622
Showing 2 changed files with 6 additions and 23 deletions.
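The field deleted from Tokenizer.zig below documents what the removed mode was for: with path_escapes set, a backslash inside an #include/#embed filename (a Windows-style path such as "C:\temp\x.h") was an ordinary path character rather than the start of an escape sequence. A minimal standalone sketch of that distinction, with illustrative names that are not this repository's API:

const std = @import("std");

// Standalone model of the two lexing modes this commit unifies; the names
// here are illustrative only, not this repository's actual API.
const Mode = enum { path_escapes, string_escapes };

/// Scan a double-quoted literal starting at src[0] == '"'.
/// Returns the literal including its quotes, or null if unterminated.
fn scanStringLiteral(src: []const u8, mode: Mode) ?[]const u8 {
    var i: usize = 1;
    while (i < src.len) : (i += 1) {
        switch (src[i]) {
            '"' => return src[0 .. i + 1],
            '\n' => return null,
            '\\' => if (mode == .string_escapes) {
                // Backslash starts an escape sequence: consume the escaped
                // character together with it. In .path_escapes mode the
                // backslash is just an ordinary path character, so nothing
                // extra is consumed.
                i += 1;
            },
            else => {},
        }
    }
    return null;
}

test "backslash handling differs between the two modes" {
    // "a\" : in path mode the backslash is ordinary and the final quote
    // terminates the literal; in string mode the quote is consumed as part
    // of an escape sequence, leaving the literal unterminated.
    const tricky = "\"a\\\"";
    try std.testing.expect(scanStringLiteral(tricky, .path_escapes) != null);
    try std.testing.expect(scanStringLiteral(tricky, .string_escapes) == null);
}

After this commit the tokenizer always takes the string_escape_sequence path, and the extra boolean parameter disappears from expandedSliceExtra and its call sites.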
20 changes: 5 additions & 15 deletions src/Preprocessor.zig
@@ -1249,7 +1249,7 @@ fn reconstructIncludeString(pp: *Preprocessor, param_toks: []const Token) !?[]co
}

for (params) |tok| {
-const str = pp.expandedSliceExtra(tok, .preserve_macro_ws, false);
+const str = pp.expandedSliceExtra(tok, .preserve_macro_ws);
try pp.char_buf.appendSlice(str);
}

@@ -1995,12 +1995,7 @@ fn expandMacro(pp: *Preprocessor, tokenizer: *Tokenizer, raw: RawToken) MacroErr
}
}

-fn expandedSliceExtra(
-    pp: *const Preprocessor,
-    tok: Token,
-    macro_ws_handling: enum { single_macro_ws, preserve_macro_ws },
-    path_escapes: bool,
-) []const u8 {
+fn expandedSliceExtra(pp: *const Preprocessor, tok: Token, macro_ws_handling: enum { single_macro_ws, preserve_macro_ws }) []const u8 {
if (tok.id.lexeme()) |some| {
if (!tok.id.allowsDigraphs(pp.comp) and !(tok.id == .macro_ws and macro_ws_handling == .preserve_macro_ws)) return some;
}
@@ -2009,7 +2004,6 @@ fn expandedSliceExtra(
.comp = pp.comp,
.index = tok.loc.byte_offset,
.source = .generated,
-.path_escapes = path_escapes,
};
if (tok.id == .macro_string) {
while (true) : (tmp_tokenizer.index += 1) {
@@ -2023,7 +2017,7 @@

/// Get expanded token source string.
pub fn expandedSlice(pp: *Preprocessor, tok: Token) []const u8 {
-return pp.expandedSliceExtra(tok, .single_macro_ws, false);
+return pp.expandedSliceExtra(tok, .single_macro_ws);
}

/// Concat two tokens and add the result to pp.generated
@@ -2408,16 +2402,14 @@ fn defineFn(pp: *Preprocessor, tokenizer: *Tokenizer, macro_name: RawToken, l_pa

/// Handle an #embed directive
fn embed(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!void {
-tokenizer.path_escapes = true;
-defer tokenizer.path_escapes = false;
const first = tokenizer.nextNoWS();
const filename_tok = pp.findIncludeFilenameToken(first, tokenizer, .expect_nl_eof) catch |er| switch (er) {
error.InvalidInclude => return,
else => |e| return e,
};

// Check for empty filename.
-const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws, true);
+const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws);
if (tok_slice.len < 3) {
try pp.err(first, .empty_filename);
return;
@@ -2459,8 +2451,6 @@ fn embed(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!void {

// Handle a #include directive.
fn include(pp: *Preprocessor, tokenizer: *Tokenizer, which: Compilation.WhichInclude) MacroError!void {
-tokenizer.path_escapes = true;
-defer tokenizer.path_escapes = false;
const first = tokenizer.nextNoWS();
const new_source = findIncludeSource(pp, tokenizer, first, which) catch |er| switch (er) {
error.InvalidInclude => return,
@@ -2626,7 +2616,7 @@ fn findIncludeSource(pp: *Preprocessor, tokenizer: *Tokenizer, first: RawToken,
const filename_tok = try pp.findIncludeFilenameToken(first, tokenizer, .expect_nl_eof);

// Check for empty filename.
-const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws, true);
+const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws);
if (tok_slice.len < 3) {
try pp.err(first, .empty_filename);
return error.InvalidInclude;
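Both the #embed and #include paths keep the tok_slice.len < 3 guard after the change; it works because the expanded slice still includes the two delimiter characters, so an empty filename ("" or <>) is exactly two bytes long. A tiny hypothetical check, not part of the repository's tests:

const std = @import("std");

// Illustrative only: a filename slice keeps its delimiters, so an empty
// include name is two bytes and anything under three bytes names no file.
test "empty filename is shorter than three bytes" {
    try std.testing.expect("\"\"".len < 3);
    try std.testing.expect("<>".len < 3);
    try std.testing.expect(!("\"a.h\"".len < 3));
}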
9 changes: 1 addition & 8 deletions src/Tokenizer.zig
@@ -1003,8 +1003,6 @@ index: u32 = 0,
source: Source.Id,
comp: *const Compilation,
line: u32 = 1,
-/// Used to parse include strings with Windows style paths.
-path_escapes: bool = false,

pub fn next(self: *Tokenizer) Token {
var state: enum {
@@ -1015,7 +1013,6 @@ pub fn next(self: *Tokenizer) Token {
U,
L,
string_literal,
-path_escape,
char_literal_start,
char_literal,
char_escape_sequence,
@@ -1233,7 +1230,7 @@
},
.string_literal => switch (c) {
'\\' => {
-state = if (self.path_escapes) .path_escape else .string_escape_sequence;
+state = .string_escape_sequence;
},
'"' => {
self.index += 1;
@@ -1246,9 +1243,6 @@
'\r' => unreachable,
else => {},
},
-.path_escape => {
-    state = .string_literal;
-},
.char_literal_start => switch (c) {
'\\' => {
state = .char_escape_sequence;
@@ -1704,7 +1698,6 @@ pub fn next(self: *Tokenizer) Token {
.start, .line_comment => {},
.u, .u8, .U, .L, .identifier => id = Token.getTokenId(self.comp, self.buf[start..self.index]),
.extended_identifier => id = .extended_identifier,
-.path_escape => id = .invalid,

.period2 => {
self.index -= 1;
