Skip to content

Commit

Permalink
Preprocessor: implement computed include
Browse files Browse the repository at this point in the history
Closes #287
  • Loading branch information
Vexu committed Nov 25, 2023
1 parent 500446a commit 9d538ea
Show file tree
Hide file tree
Showing 3 changed files with 60 additions and 21 deletions.
7 changes: 4 additions & 3 deletions src/aro/Diagnostics.zig
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ pub fn set(d: *Diagnostics, name: []const u8, to: Kind) !void {
try d.addExtra(.{}, .{
.tag = .unknown_warning,
.extra = .{ .str = name },
}, &.{});
}, &.{}, true);
}

pub fn init(gpa: Allocator) Diagnostics {
Expand All @@ -251,14 +251,15 @@ pub fn deinit(d: *Diagnostics) void {
}

/// Add a diagnostic message to the compilation's diagnostics list.
/// Forwards to `addExtra` with the "expanded from here" note location
/// enabled (the `true` argument maps to `addExtra`'s `note_msg_loc`).
pub fn add(comp: *Compilation, msg: Message, expansion_locs: []const Source.Location) Compilation.Error!void {
    return comp.diagnostics.addExtra(comp.langopts, msg, expansion_locs, true);
}

pub fn addExtra(
d: *Diagnostics,
langopts: LangOpts,
msg: Message,
expansion_locs: []const Source.Location,
note_msg_loc: bool,
) Compilation.Error!void {
const kind = d.tagKind(msg.tag, langopts);
if (kind == .off) return;
Expand Down Expand Up @@ -301,7 +302,7 @@ pub fn addExtra(
}
}

d.list.appendAssumeCapacity(.{
if (note_msg_loc) d.list.appendAssumeCapacity(.{
.tag = .expanded_from_here,
.kind = .note,
.loc = msg.loc,
Expand Down
71 changes: 53 additions & 18 deletions src/aro/Preprocessor.zig
Original file line number Diff line number Diff line change
Expand Up @@ -747,6 +747,17 @@ fn fatal(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args: anyty
return error.FatalError;
}

/// Report a fatal "'<filename>' not found" error at the location of `tok`.
/// Temporarily forces `fatal_errors` on so that `addExtra` returns
/// `Compilation.Error.FatalError` regardless of the user's configuration;
/// the flag is restored on scope exit via `defer`.
fn fatalNotFound(pp: *Preprocessor, tok: Token, filename: []const u8) Compilation.Error {
    const old = pp.comp.diagnostics.fatal_errors;
    pp.comp.diagnostics.fatal_errors = true;
    defer pp.comp.diagnostics.fatal_errors = old;

    // The message string is allocated in the diagnostics arena, so it lives
    // as long as the diagnostics themselves; no explicit free is needed here.
    try pp.comp.diagnostics.addExtra(pp.comp.langopts, .{ .tag = .cli_error, .loc = tok.loc, .extra = .{
        .str = try std.fmt.allocPrint(pp.comp.diagnostics.arena.allocator(), "'{s}' not found", .{filename}),
    } }, tok.expansionSlice(), false);
    unreachable; // addExtra should've returned FatalError
}

fn verboseLog(pp: *Preprocessor, raw: RawToken, comptime fmt: []const u8, args: anytype) void {
const source = pp.comp.getSource(raw.source);
const line_col = source.lineCol(.{ .id = raw.source, .line = raw.line, .byte_offset = raw.start });
Expand Down Expand Up @@ -2703,6 +2714,7 @@ fn embed(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!void {
error.InvalidInclude => return,
else => |e| return e,
};
defer Token.free(filename_tok.expansion_locs, pp.gpa);

// Check for empty filename.
const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws);
Expand Down Expand Up @@ -2836,7 +2848,7 @@ fn embed(pp: *Preprocessor, tokenizer: *Tokenizer) MacroError!void {
}

const embed_bytes = (try pp.comp.findEmbed(filename, first.source, include_type, limit)) orelse
return pp.fatal(first, "'{s}' not found", .{filename});
return pp.fatalNotFound(filename_tok, filename);
defer pp.comp.gpa.free(embed_bytes);

try Range.expand(prefix, pp, tokenizer);
Expand Down Expand Up @@ -2984,8 +2996,6 @@ fn findIncludeFilenameToken(
tokenizer: *Tokenizer,
trailing_token_behavior: enum { ignore_trailing_tokens, expect_nl_eof },
) !Token {
const start = pp.tokens.len;
defer pp.tokens.len = start;
var first = first_token;

if (first.id == .angle_bracket_left) to_end: {
Expand All @@ -3008,35 +3018,60 @@ fn findIncludeFilenameToken(
}, &.{});
try pp.err(first, .header_str_match);
}
// Try to expand if the argument is a macro.
try pp.expandMacro(tokenizer, first);

// Check that we actually got a string.
const filename_tok = pp.tokens.get(start);
switch (filename_tok.id) {
.string_literal, .macro_string => {},
else => {
try pp.err(first, .expected_filename);
try pp.expectNl(tokenizer);
return error.InvalidInclude;
const source_tok = tokFromRaw(first);
const filename_tok, const expanded_trailing = switch (source_tok.id) {
.string_literal, .macro_string => .{ source_tok, false },
else => expanded: {
// Try to expand if the argument is a macro.
pp.top_expansion_buf.items.len = 0;
defer for (pp.top_expansion_buf.items) |tok| Token.free(tok.expansion_locs, pp.gpa);
try pp.top_expansion_buf.append(source_tok);
pp.expansion_source_loc = source_tok.loc;

try pp.expandMacroExhaustive(tokenizer, &pp.top_expansion_buf, 0, 1, true, .non_expr);
var trailing_toks: []const Token = &.{};
const include_str = (try pp.reconstructIncludeString(pp.top_expansion_buf.items, &trailing_toks)) orelse {
try pp.err(first, .expected_filename);
try pp.expectNl(tokenizer);
return error.InvalidInclude;
};
const start = pp.comp.generated_buf.items.len;
try pp.comp.generated_buf.appendSlice(pp.gpa, include_str);

break :expanded .{ try pp.makeGeneratedToken(start, switch (include_str[0]) {
'"' => .string_literal,
'<' => .macro_string,
else => unreachable,
}, pp.top_expansion_buf.items[0]), trailing_toks.len != 0 };
},
}
};

switch (trailing_token_behavior) {
.expect_nl_eof => {
// Error on extra tokens.
const nl = tokenizer.nextNoWS();
if ((nl.id != .nl and nl.id != .eof) or pp.tokens.len > start + 1) {
if ((nl.id != .nl and nl.id != .eof) or expanded_trailing) {
skipToNl(tokenizer);
try pp.err(first, .extra_tokens_directive_end);
try pp.comp.diagnostics.addExtra(pp.comp.langopts, .{
.tag = .extra_tokens_directive_end,
.loc = filename_tok.loc,
}, filename_tok.expansionSlice(), false);
}
},
.ignore_trailing_tokens => {},
.ignore_trailing_tokens => if (expanded_trailing) {
try pp.comp.diagnostics.addExtra(pp.comp.langopts, .{
.tag = .extra_tokens_directive_end,
.loc = filename_tok.loc,
}, filename_tok.expansionSlice(), false);
},
}
return filename_tok;
}

fn findIncludeSource(pp: *Preprocessor, tokenizer: *Tokenizer, first: RawToken, which: Compilation.WhichInclude) !Source {
const filename_tok = try pp.findIncludeFilenameToken(first, tokenizer, .expect_nl_eof);
defer Token.free(filename_tok.expansion_locs, pp.gpa);

// Check for empty filename.
const tok_slice = pp.expandedSliceExtra(filename_tok, .single_macro_ws);
Expand All @@ -3054,7 +3089,7 @@ fn findIncludeSource(pp: *Preprocessor, tokenizer: *Tokenizer, first: RawToken,
};

return (try pp.comp.findInclude(filename, first, include_type, which)) orelse
pp.fatal(first, "'{s}' not found", .{filename});
return pp.fatalNotFound(filename_tok, filename);
}

fn printLinemarker(
Expand Down
3 changes: 3 additions & 0 deletions test/cases/builtin headers.c
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,6 @@
#include <stdatomic.h>
#include <stdbool.h>
/* a */ # /* b */ include /* c */ <stddef.h>

#define HEADER <stdarg.h>
#include HEADER

0 comments on commit 9d538ea

Please sign in to comment.