giving the parser a stack limit

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
This commit is contained in:
Jakub Doka 2025-03-17 20:57:19 +01:00
parent 6b83b5b320
commit b858a8cc1d
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
7 changed files with 33 additions and 14 deletions

View file

@ -90,7 +90,9 @@ pub fn build(b: *std.Build) !void {
});
test_run.root_module.addAnonymousImport("utils", .{ .root_source_file = b.path("src/tests.zig") });
test_step.dependOn(&b.addRunArtifact(test_run).step);
const run = b.addRunArtifact(test_run);
run.has_side_effects = true;
test_step.dependOn(&run.step);
break :example_tests;
}
@ -107,7 +109,9 @@ pub fn build(b: *std.Build) !void {
});
test_run.root_module.addAnonymousImport("utils", .{ .root_source_file = b.path("src/tests.zig") });
test_step.dependOn(&b.addRunArtifact(test_run).step);
const run = b.addRunArtifact(test_run);
run.has_side_effects = true;
test_step.dependOn(&run.step);
break :example_tests;
}
@ -180,7 +184,7 @@ pub fn build(b: *std.Build) !void {
});
const run_gen_finding_tests = b.addRunArtifact(gen_finding_tests);
run_gen_finding_tests.addDirectoryArg(out_dir);
run_gen_finding_tests.addDirectoryArg(b.path("root/hblang/.zig-cache/o/a1c0c4a0024f9744bb088d2785d0c92d/findings"));
run_gen_finding_tests.addArg("enabled");
const fuzz_out = run_gen_finding_tests.addOutputFileArg("fuzz_finding_tests.zig");

View file

@ -51,5 +51,4 @@ fi
ssh -p $SSH_ARGS "export IN_SCRIPT=true; export DURATION=$DURATION; $(cat $0)"
sleep $(($DURATION + 3))
scp -P $SSH_ARGS:/root/hblang/zig-out/fuzz_finding_tests.zig ./src/fuzz_finding_tests.zig
scp -P $SSH_ARGS:/root/hblang/zig-out/arch.gz zig-out/

View file

@ -150,21 +150,22 @@ pub fn ralloc(comptime Mach: type, func: *graph.Func(Mach)) []u16 {
const colors = func.arena.allocator().alloc(u16, func.instr_count) catch unreachable;
@memset(colors, sentinel);
var selection_set = Set.initEmpty(tmp.arena.allocator(), func.instr_count + 64) catch unreachable;
for (interference_table, colors, instrs, 0..) |it_row, *color, instr, i| {
const set = u256;
@memset(Block.setMasks(selection_set), 0);
var selection_set: set = 0;
var iter = it_row.iterator(.{});
while (iter.next()) |e| if (i != e) {
if (colors[e] != sentinel) selection_set |= @as(set, 1) << @intCast(colors[e]);
selection_set |= instrs[e].def.clobbers();
if (colors[e] != sentinel) selection_set.set(colors[e]);
Block.setMasks(selection_set)[0] |= instrs[e].def.clobbers();
};
const bias = instr.def.regBias();
if (bias != null and selection_set & (@as(set, 1) << @intCast(bias.?)) == 0) {
if (bias != null and !selection_set.isSet(bias.?)) {
color.* = bias.?;
} else {
color.* = @ctz(~selection_set);
var it = selection_set.iterator(.{ .kind = .unset });
color.* = @intCast(it.next().?);
}
}

View file

@ -227,6 +227,7 @@ pub fn init(
var lexer = Lexer.init(opts.code, 0);
var parser = Parser{
.stack_base = @frameAddress(),
.arena = arena,
.path = opts.path,
.current = opts.current,

View file

@ -25,9 +25,12 @@ diagnostics: std.io.AnyWriter,
list_pos: Ast.Pos = undefined,
deferring: bool = false,
errored: bool = false,
stack_base: usize,
const stack_limit = 1024 * (512 + 256);
const Parser = @This();
const Error = error{UnexpectedToken} || std.mem.Allocator.Error;
const Error = error{ UnexpectedToken, StackOverflow } || std.mem.Allocator.Error;
const Sym = struct {
id: Ident,
@ -286,7 +289,18 @@ fn parseCtorField(self: *Parser) Error!Ast.CtorField {
};
}
/// Guards recursive-descent parsing against native stack exhaustion.
/// Measures how far the current frame has drifted from the recorded
/// `stack_base` and reports an error once it exceeds `stack_limit`.
fn checkStack(self: *Parser) !void {
    // Wrapping subtraction + bitcast handles stacks growing in either
    // direction; @abs yields the magnitude of the drift.
    const drift: isize = @bitCast(@frameAddress() -% self.stack_base);
    const depth = @abs(drift);
    if (depth <= stack_limit) return;
    self.report(self.cur.pos, "the tree is too deep", .{});
    return error.StackOverflow;
}
fn parseUnitWithoutTail(self: *Parser) Error!Id {
try self.checkStack();
var token = self.advance();
const scope_frame = self.active_syms.items.len;
return try self.store.allocDyn(self.arena.allocator(), switch (token.kind.expand()) {

View file

@ -259,6 +259,7 @@ pub const Id = enum(IdRepr) {
}
pub fn size(self: Id, types: *Types) u64 {
// TODO: what about uninhabited types?
return switch (self.data()) {
.Builtin => |b| switch (b) {
.never, .any => 0,
@ -278,7 +279,7 @@ pub const Id = enum(IdRepr) {
.Tuple => |t| {
var total_size: u64 = 0;
var alignm: u64 = 1;
for (types.store.get(t).fields) |f| {
for (t.getFields(types)) |f| {
alignm = @max(alignm, f.ty.alignment(types));
total_size = std.mem.alignForward(u64, total_size, f.ty.alignment(types));
total_size += f.ty.size(types);

View file

@ -1,2 +1 @@
const utils = @import("utils");