Commit d662d5f

Merge pull request #707 from ehaas/crash-fixes

More fuzzer-discovered crash fixes

Vexu authored May 23, 2024
2 parents: 00273a1 + 97697e7

Showing 14 changed files with 168 additions and 89 deletions.
src/aro/Hideset.zig (2 changes: 1 addition & 1 deletion)

@@ -46,7 +46,7 @@ const Item = struct {
     const List = std.MultiArrayList(Item);
 };
 
-const Index = enum(u32) {
+pub const Index = enum(u32) {
     none = std.math.maxInt(u32),
     _,
 };

src/aro/Parser.zig (47 changes: 22 additions & 25 deletions)

@@ -3030,7 +3030,7 @@ fn directDeclarator(p: *Parser, base_type: Type, d: *Declarator, kind: Declarato
         try p.errStr(.array_size_non_int, size_tok, try p.typeStr(size.ty));
         return error.ParsingFailed;
     }
-    if (base_type.is(.c23_auto)) {
+    if (base_type.is(.c23_auto) or outer.is(.invalid)) {
         // issue error later
         return Type.invalid;
     } else if (size.val.opt_ref == .none) {
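
Note: the fuzzer input itself is not shown on this page. One plausible shape for what the new `outer.is(.invalid)` check guards against (a hypothetical reproducer, not the actual test case):

    /* An inner dimension's size expression errors out, leaving Type.invalid
       behind; the outer dimension's size check must then bail out instead of
       asserting on the invalid element type. */
    int a[1]['a' / 0][2];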
@@ -3861,7 +3861,7 @@ fn convertInitList(p: *Parser, il: InitList, init_ty: Type) Error!NodeIndex {
         .data = .{ .bin = .{ .lhs = .none, .rhs = .none } },
     };
 
-    const max_elems = p.comp.maxArrayBytes() / (elem_ty.sizeof(p.comp) orelse 1);
+    const max_elems = p.comp.maxArrayBytes() / (@max(1, elem_ty.sizeof(p.comp) orelse 1));
     if (start > max_elems) {
         try p.errTok(.array_too_large, il.tok);
         start = max_elems;
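
The `orelse 1` only covered a null `sizeof` (incomplete types); a sizeof of 0 slipped through and made the integer division panic. A sketch of input that can reach a zero-sized element type (hypothetical, not the actual fuzzer case):

    /* GNU extension: a zero-length array makes the struct zero-sized,
       so the old divisor evaluated to 0. */
    struct Empty { int x[0]; };
    struct Empty arr[] = { [3] = {} };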
@@ -5746,8 +5746,7 @@ pub const Result = struct {
         .{ .complex_float, .float },
         // No `_Complex __fp16` type
         .{ .invalid, .fp16 },
-        // No `_Complex _Float16`
-        .{ .invalid, .float16 },
+        .{ .complex_float16, .float16 },
     };
     const a_spec = a.ty.canonicalize(.standard).specifier;
     const b_spec = b.ty.canonicalize(.standard).specifier;
@@ -5765,6 +5764,7 @@
         if (try a.floatConversion(b, a_spec, b_spec, p, float_types[3])) return;
         if (try a.floatConversion(b, a_spec, b_spec, p, float_types[4])) return;
         if (try a.floatConversion(b, a_spec, b_spec, p, float_types[5])) return;
+        unreachable;
     }
 
     if (a.ty.eql(b.ty, p.comp, true)) {
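
With `_Complex _Float16` now a real conversion target, every entry in `float_types` is handled and falling out of the chain is impossible, hence the `unreachable`. A sketch of the arithmetic this enables (assuming a target with _Float16 support; not the actual fuzzer case):

    _Float16 a = 1.5f16;
    _Float16 _Complex b = 2.0f16;   /* real constant, implicitly converted */
    _Float16 _Complex c = a + b;    /* usual conversions now yield _Complex _Float16 */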
@@ -6920,11 +6920,11 @@ fn offsetofMemberDesignator(p: *Parser, base_ty: Type, want_bits: bool) Error!Re
     errdefer p.skipTo(.r_paren);
     const base_field_name_tok = try p.expectIdentifier();
     const base_field_name = try StrInt.intern(p.comp, p.tokSlice(base_field_name_tok));
-    try p.validateFieldAccess(base_ty, base_ty, base_field_name_tok, base_field_name);
+    const base_record_ty = base_ty.getRecord().?;
+    try p.validateFieldAccess(base_record_ty, base_ty, base_field_name_tok, base_field_name);
     const base_node = try p.addNode(.{ .tag = .default_init_expr, .ty = base_ty, .data = undefined });
 
     var cur_offset: u64 = 0;
-    const base_record_ty = base_ty.canonicalize(.standard);
     var lhs = try p.fieldAccessExtra(base_node, base_record_ty, base_field_name, false, &cur_offset);
 
     var total_offset = cur_offset;
@@ -6934,13 +6934,12 @@
                 const field_name_tok = try p.expectIdentifier();
                 const field_name = try StrInt.intern(p.comp, p.tokSlice(field_name_tok));
 
-                if (!lhs.ty.isRecord()) {
+                const lhs_record_ty = lhs.ty.getRecord() orelse {
                     try p.errStr(.offsetof_ty, field_name_tok, try p.typeStr(lhs.ty));
                     return error.ParsingFailed;
-                }
-                try p.validateFieldAccess(lhs.ty, lhs.ty, field_name_tok, field_name);
-                const record_ty = lhs.ty.canonicalize(.standard);
-                lhs = try p.fieldAccessExtra(lhs.node, record_ty, field_name, false, &cur_offset);
+                };
+                try p.validateFieldAccess(lhs_record_ty, lhs.ty, field_name_tok, field_name);
+                lhs = try p.fieldAccessExtra(lhs.node, lhs_record_ty, field_name, false, &cur_offset);
                 total_offset += cur_offset;
             },
             .l_bracket => {
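
A plausible reproducer shape (hypothetical, not the actual fuzzer case): a member designator that steps through a non-record type must produce a clean error instead of crashing:

    struct S { int a; };
    int n = __builtin_offsetof(struct S, a.b);   /* 'a' is int, not a record */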
@@ -7517,16 +7516,12 @@ fn fieldAccess(
     const expr_ty = lhs.ty;
     const is_ptr = expr_ty.isPtr();
     const expr_base_ty = if (is_ptr) expr_ty.elemType() else expr_ty;
-    const record_ty = expr_base_ty.canonicalize(.standard);
+    const record_ty = expr_base_ty.getRecord() orelse {
+        try p.errStr(.expected_record_ty, field_name_tok, try p.typeStr(expr_ty));
+        return error.ParsingFailed;
+    };
 
-    switch (record_ty.specifier) {
-        .@"struct", .@"union" => {},
-        else => {
-            try p.errStr(.expected_record_ty, field_name_tok, try p.typeStr(expr_ty));
-            return error.ParsingFailed;
-        },
-    }
-    if (record_ty.hasIncompleteSize()) {
+    if (record_ty.isIncomplete()) {
         try p.errStr(.deref_incomplete_ty_ptr, field_name_tok - 2, try p.typeStr(expr_base_ty));
         return error.ParsingFailed;
     }
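
`getRecord()` both rejects non-record types and handles the wrapped record types the old specifier switch could trip over. Hypothetical inputs for the two error paths (not the actual fuzzer cases):

    struct Incomplete;
    int f(struct Incomplete *p, int i) {
        return p->x    /* incomplete record: deref_incomplete_ty_ptr */
             + i.y;    /* non-record: expected_record_ty, no crash */
    }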
@@ -7539,7 +7534,7 @@ fn fieldAccess(
     return p.fieldAccessExtra(lhs.node, record_ty, field_name, is_arrow, &discard);
 }
 
-fn validateFieldAccess(p: *Parser, record_ty: Type, expr_ty: Type, field_name_tok: TokenIndex, field_name: StringId) Error!void {
+fn validateFieldAccess(p: *Parser, record_ty: *const Type.Record, expr_ty: Type, field_name_tok: TokenIndex, field_name: StringId) Error!void {
     if (record_ty.hasField(field_name)) return;
 
     p.strings.items.len = 0;
@@ -7554,16 +7549,16 @@ fn validateFieldAccess(p: *Parser, record_ty: Type, expr_ty: Type, field_name_to
     return error.ParsingFailed;
 }
 
-fn fieldAccessExtra(p: *Parser, lhs: NodeIndex, record_ty: Type, field_name: StringId, is_arrow: bool, offset_bits: *u64) Error!Result {
-    for (record_ty.data.record.fields, 0..) |f, i| {
+fn fieldAccessExtra(p: *Parser, lhs: NodeIndex, record_ty: *const Type.Record, field_name: StringId, is_arrow: bool, offset_bits: *u64) Error!Result {
+    for (record_ty.fields, 0..) |f, i| {
         if (f.isAnonymousRecord()) {
             if (!f.ty.hasField(field_name)) continue;
             const inner = try p.addNode(.{
                 .tag = if (is_arrow) .member_access_ptr_expr else .member_access_expr,
                 .ty = f.ty,
                 .data = .{ .member = .{ .lhs = lhs, .index = @intCast(i) } },
             });
-            const ret = p.fieldAccessExtra(inner, f.ty, field_name, false, offset_bits);
+            const ret = p.fieldAccessExtra(inner, f.ty.getRecord().?, field_name, false, offset_bits);
             offset_bits.* = f.layout.offset_bits;
             return ret;
         }
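
The anonymous-member recursion now passes the member's record data directly; the `.?` is safe because `isAnonymousRecord()` implies a record type. For reference, the C construct being handled:

    struct Outer {
        struct { int inner; };   /* C11 anonymous struct member */
    } o;
    int v = o.inner;             /* found by recursing through the anonymous member */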
@@ -8279,7 +8274,7 @@ fn parseFloat(p: *Parser, buf: []const u8, suffix: NumberSuffix) !Result {
     const ty = Type{ .specifier = switch (suffix) {
         .None, .I => .double,
         .F, .IF => .float,
-        .F16 => .float16,
+        .F16, .IF16 => .float16,
         .L, .IL => .long_double,
         .W, .IW => p.comp.float80Type().?.specifier,
         .Q, .IQ, .F128, .IF128 => .float128,
@@ -8314,13 +8309,15 @@ fn parseFloat(p: *Parser, buf: []const u8, suffix: NumberSuffix) !Result {
     try p.err(.gnu_imaginary_constant);
     res.ty = .{ .specifier = switch (suffix) {
         .I => .complex_double,
+        .IF16 => .complex_float16,
         .IF => .complex_float,
         .IL => .complex_long_double,
         .IW => p.comp.float80Type().?.makeComplex().specifier,
         .IQ, .IF128 => .complex_float128,
         else => unreachable,
     } };
     res.val = try Value.intern(p.comp, switch (res.ty.bitSizeof(p.comp).?) {
+        32 => .{ .complex = .{ .cf16 = .{ 0.0, val.toFloat(f16, p.comp) } } },
         64 => .{ .complex = .{ .cf32 = .{ 0.0, val.toFloat(f32, p.comp) } } },
         128 => .{ .complex = .{ .cf64 = .{ 0.0, val.toFloat(f64, p.comp) } } },
         160 => .{ .complex = .{ .cf80 = .{ 0.0, val.toFloat(f80, p.comp) } } },
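
The new 32-bit case is `_Complex _Float16` (two 16-bit halves). A sketch of the constants this accepts (GNU imaginary suffix combined with the C23 f16 suffix; hypothetical spelling, not the actual fuzzer case):

    _Float16 _Complex w = 2.5if16;   /* interned as { 0.0, 2.5 } in cf16 form */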

src/aro/Preprocessor.zig (46 changes: 26 additions & 20 deletions)

@@ -1562,11 +1562,13 @@ fn getPasteArgs(args: []const TokenWithExpansionLocs) []const TokenWithExpansion

 fn expandFuncMacro(
     pp: *Preprocessor,
-    loc: Source.Location,
+    macro_tok: TokenWithExpansionLocs,
     func_macro: *const Macro,
     args: *const MacroArguments,
     expanded_args: *const MacroArguments,
+    hideset_arg: Hideset.Index,
 ) MacroError!ExpandBuf {
+    var hideset = hideset_arg;
     var buf = ExpandBuf.init(pp.gpa);
     try buf.ensureTotalCapacity(func_macro.tokens.len);
     errdefer buf.deinit();
@@ -1617,16 +1619,21 @@
                     },
                     else => &[1]TokenWithExpansionLocs{tokFromRaw(raw_next)},
                 };
-
                 try pp.pasteTokens(&buf, next);
                 if (next.len != 0) break;
             },
             .macro_param_no_expand => {
+                if (tok_i + 1 < func_macro.tokens.len and func_macro.tokens[tok_i + 1].id == .hash_hash) {
+                    hideset = pp.hideset.get(tokFromRaw(func_macro.tokens[tok_i + 1]).loc);
+                }
                 const slice = getPasteArgs(args.items[raw.end]);
                 const raw_loc = Source.Location{ .id = raw.source, .byte_offset = raw.start, .line = raw.line };
                 try bufCopyTokens(&buf, slice, &.{raw_loc});
             },
             .macro_param => {
+                if (tok_i + 1 < func_macro.tokens.len and func_macro.tokens[tok_i + 1].id == .hash_hash) {
+                    hideset = pp.hideset.get(tokFromRaw(func_macro.tokens[tok_i + 1]).loc);
+                }
                 const arg = expanded_args.items[raw.end];
                 const raw_loc = Source.Location{ .id = raw.source, .byte_offset = raw.start, .line = raw.line };
                 try bufCopyTokens(&buf, arg, &.{raw_loc});
@@ -1665,9 +1672,9 @@
                 const arg = expanded_args.items[0];
                 const result = if (arg.len == 0) blk: {
                     const extra = Diagnostics.Message.Extra{ .arguments = .{ .expected = 1, .actual = 0 } };
-                    try pp.comp.addDiagnostic(.{ .tag = .expected_arguments, .loc = loc, .extra = extra }, &.{});
+                    try pp.comp.addDiagnostic(.{ .tag = .expected_arguments, .loc = macro_tok.loc, .extra = extra }, &.{});
                     break :blk false;
-                } else try pp.handleBuiltinMacro(raw.id, arg, loc);
+                } else try pp.handleBuiltinMacro(raw.id, arg, macro_tok.loc);
                 const start = pp.comp.generated_buf.items.len;
                 const w = pp.comp.generated_buf.writer(pp.gpa);
                 try w.print("{}\n", .{@intFromBool(result)});
@@ -1678,7 +1685,7 @@
                 const not_found = "0\n";
                 const result = if (arg.len == 0) blk: {
                     const extra = Diagnostics.Message.Extra{ .arguments = .{ .expected = 1, .actual = 0 } };
-                    try pp.comp.addDiagnostic(.{ .tag = .expected_arguments, .loc = loc, .extra = extra }, &.{});
+                    try pp.comp.addDiagnostic(.{ .tag = .expected_arguments, .loc = macro_tok.loc, .extra = extra }, &.{});
                     break :blk not_found;
                 } else res: {
                     var invalid: ?TokenWithExpansionLocs = null;
@@ -1710,7 +1717,7 @@
                     if (vendor_ident != null and attr_ident == null) {
                         invalid = vendor_ident;
                     } else if (attr_ident == null and invalid == null) {
-                        invalid = .{ .id = .eof, .loc = loc };
+                        invalid = .{ .id = .eof, .loc = macro_tok.loc };
                     }
                     if (invalid) |some| {
                         try pp.comp.addDiagnostic(
@@ -1754,7 +1761,7 @@
                 const not_found = "0\n";
                 const result = if (arg.len == 0) blk: {
                     const extra = Diagnostics.Message.Extra{ .arguments = .{ .expected = 1, .actual = 0 } };
-                    try pp.comp.addDiagnostic(.{ .tag = .expected_arguments, .loc = loc, .extra = extra }, &.{});
+                    try pp.comp.addDiagnostic(.{ .tag = .expected_arguments, .loc = macro_tok.loc, .extra = extra }, &.{});
                     break :blk not_found;
                 } else res: {
                     var embed_args: []const TokenWithExpansionLocs = &.{};
@@ -1900,11 +1907,11 @@
                         break;
                     },
                 };
-                if (string == null and invalid == null) invalid = .{ .loc = loc, .id = .eof };
+                if (string == null and invalid == null) invalid = .{ .loc = macro_tok.loc, .id = .eof };
                 if (invalid) |some| try pp.comp.addDiagnostic(
                     .{ .tag = .pragma_operator_string_literal, .loc = some.loc },
                     some.expansionSlice(),
-                ) else try pp.pragmaOperator(string.?, loc);
+                ) else try pp.pragmaOperator(string.?, macro_tok.loc);
             },
             .comma => {
                 if (tok_i + 2 < func_macro.tokens.len and func_macro.tokens[tok_i + 1].id == .hash_hash) {
Expand Down Expand Up @@ -1953,6 +1960,15 @@ fn expandFuncMacro(
     }
     removePlacemarkers(&buf);
 
+    const macro_expansion_locs = macro_tok.expansionSlice();
+    for (buf.items) |*tok| {
+        try tok.addExpansionLocation(pp.gpa, &.{macro_tok.loc});
+        try tok.addExpansionLocation(pp.gpa, macro_expansion_locs);
+        const tok_hidelist = pp.hideset.get(tok.loc);
+        const new_hidelist = try pp.hideset.@"union"(tok_hidelist, hideset);
+        try pp.hideset.put(tok.loc, new_hidelist);
+    }
+
     return buf;
 }

@@ -2291,19 +2307,9 @@
                 expanded_args.appendAssumeCapacity(try expand_buf.toOwnedSlice());
             }
 
-            var res = try pp.expandFuncMacro(macro_tok.loc, macro, &args, &expanded_args);
+            var res = try pp.expandFuncMacro(macro_tok, macro, &args, &expanded_args, hs);
             defer res.deinit();
             const tokens_added = res.items.len;
-
-            const macro_expansion_locs = macro_tok.expansionSlice();
-            for (res.items) |*tok| {
-                try tok.addExpansionLocation(pp.gpa, &.{macro_tok.loc});
-                try tok.addExpansionLocation(pp.gpa, macro_expansion_locs);
-                const tok_hidelist = pp.hideset.get(tok.loc);
-                const new_hidelist = try pp.hideset.@"union"(tok_hidelist, hs);
-                try pp.hideset.put(tok.loc, new_hidelist);
-            }
-
             const tokens_removed = macro_scan_idx - idx + 1;
             for (buf.items[idx .. idx + tokens_removed]) |tok| TokenWithExpansionLocs.free(tok.expansion_locs, pp.gpa);
             try buf.replaceRange(idx, tokens_removed, res.items);
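
Moving the expansion-location and hideset bookkeeping into expandFuncMacro lets it use the local `hideset`, which the `##`-adjacent parameter cases above may have swapped out. For reference, hideset tracking is what stops self-referential expansion during rescanning (illustrative example, not the fuzzer input):

    #define ID(x) x
    #define STEP(n) ID(STEP)(n + 1)
    STEP(0)   /* expands once to STEP(0 + 1); STEP is in the result's hideset, so rescanning stops */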

src/aro/Tree/number_affixes.zig (9 changes: 5 additions & 4 deletions)

@@ -74,8 +74,8 @@ pub const Suffix = enum {
     // float and imaginary float
     F, IF,
 
-    // _Float16
-    F16,
+    // _Float16 and imaginary _Float16
+    F16, IF16,
 
     // __float80
     W,
@@ -129,6 +129,7 @@

         .{ .I, &.{"I"} },
         .{ .IL, &.{ "I", "L" } },
+        .{ .IF16, &.{ "I", "F16" } },
         .{ .IF, &.{ "I", "F" } },
         .{ .IW, &.{ "I", "W" } },
         .{ .IF128, &.{ "I", "F128" } },
@@ -161,7 +162,7 @@

     pub fn isImaginary(suffix: Suffix) bool {
         return switch (suffix) {
-            .I, .IL, .IF, .IU, .IUL, .ILL, .IULL, .IWB, .IUWB, .IF128, .IQ, .IW => true,
+            .I, .IL, .IF, .IU, .IUL, .ILL, .IULL, .IWB, .IUWB, .IF128, .IQ, .IW, .IF16 => true,
             .None, .L, .F16, .F, .U, .UL, .LL, .ULL, .WB, .UWB, .F128, .Q, .W => false,
         };
     }
@@ -170,7 +171,7 @@
         return switch (suffix) {
             .None, .L, .LL, .I, .IL, .ILL, .WB, .IWB => true,
             .U, .UL, .ULL, .IU, .IUL, .IULL, .UWB, .IUWB => false,
-            .F, .IF, .F16, .F128, .IF128, .Q, .IQ, .W, .IW => unreachable,
+            .F, .IF, .F16, .F128, .IF128, .Q, .IQ, .W, .IW, .IF16 => unreachable,
         };
     }

The remaining 10 changed files are not shown here.