giving the parser a stack limit
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
parent 6b83b5b320
commit daeff94400
 build.zig | 11 ++++++++---
@@ -90,7 +90,9 @@ pub fn build(b: *std.Build) !void {
         });
 
         test_run.root_module.addAnonymousImport("utils", .{ .root_source_file = b.path("src/tests.zig") });
-        test_step.dependOn(&b.addRunArtifact(test_run).step);
+        const run = b.addRunArtifact(test_run);
+        run.has_side_effects = true;
+        test_step.dependOn(&run.step);
 
         break :example_tests;
     }
@@ -107,7 +109,9 @@ pub fn build(b: *std.Build) !void {
         });
 
         test_run.root_module.addAnonymousImport("utils", .{ .root_source_file = b.path("src/tests.zig") });
-        test_step.dependOn(&b.addRunArtifact(test_run).step);
+        const run = b.addRunArtifact(test_run);
+        run.has_side_effects = true;
+        test_step.dependOn(&run.step);
 
         break :example_tests;
     }
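Note on the two test hunks above: std.Build.Step.Run has a public has_side_effects field; setting it marks the run step as uncacheable, so the test binary is actually executed on every zig build invocation instead of being skipped when the cache considers its inputs unchanged. A minimal sketch of the same pattern, assuming a recent std.Build API (the src/tests.zig path comes from the diff; the step names and options here are illustrative, not from this repository):

const std = @import("std");

pub fn build(b: *std.Build) void {
    // Placeholder compile step standing in for the real test binary.
    const test_exe = b.addTest(.{
        .root_source_file = b.path("src/tests.zig"),
        .target = b.standardTargetOptions(.{}),
        .optimize = b.standardOptimizeOption(.{}),
    });

    const run = b.addRunArtifact(test_exe);
    // Uncacheable: force the tests to re-run every time.
    run.has_side_effects = true;

    const test_step = b.step("test", "Run the tests");
    test_step.dependOn(&run.step);
}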
@@ -180,7 +184,8 @@ pub fn build(b: *std.Build) !void {
     });
 
     const run_gen_finding_tests = b.addRunArtifact(gen_finding_tests);
-    run_gen_finding_tests.addDirectoryArg(out_dir);
+    run_gen_finding_tests.has_side_effects = true;
+    run_gen_finding_tests.addDirectoryArg(b.path("root/hblang/.zig-cache/o/a1c0c4a0024f9744bb088d2785d0c92d/findings"));
     run_gen_finding_tests.addArg("enabled");
     const fuzz_out = run_gen_finding_tests.addOutputFileArg("fuzz_finding_tests.zig");
 
@@ -51,5 +51,4 @@ fi
 
 ssh -p $SSH_ARGS "export IN_SCRIPT=true; export DURATION=$DURATION; $(cat $0)"
 
-sleep $(($DURATION + 3))
-scp -P $SSH_ARGS:/root/hblang/zig-out/fuzz_finding_tests.zig ./src/fuzz_finding_tests.zig
+scp -P $SSH_ARGS:/root/hblang/zig-out/arch.gz zig-out/
@@ -150,21 +150,22 @@ pub fn ralloc(comptime Mach: type, func: *graph.Func(Mach)) []u16 {
     const colors = func.arena.allocator().alloc(u16, func.instr_count) catch unreachable;
     @memset(colors, sentinel);
 
+    var selection_set = Set.initEmpty(tmp.arena.allocator(), func.instr_count + 64) catch unreachable;
     for (interference_table, colors, instrs, 0..) |it_row, *color, instr, i| {
-        const set = u256;
+        @memset(Block.setMasks(selection_set), 0);
 
-        var selection_set: set = 0;
         var iter = it_row.iterator(.{});
         while (iter.next()) |e| if (i != e) {
-            if (colors[e] != sentinel) selection_set |= @as(set, 1) << @intCast(colors[e]);
-            selection_set |= instrs[e].def.clobbers();
+            if (colors[e] != sentinel) selection_set.set(colors[e]);
+            Block.setMasks(selection_set)[0] |= instrs[e].def.clobbers();
         };
 
         const bias = instr.def.regBias();
-        if (bias != null and selection_set & (@as(set, 1) << @intCast(bias.?)) == 0) {
+        if (bias != null and !selection_set.isSet(bias.?)) {
             color.* = bias.?;
         } else {
-            color.* = @ctz(~selection_set);
+            var it = selection_set.iterator(.{ .kind = .unset });
+            color.* = @intCast(it.next().?);
         }
     }
 
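The register selection above moves from a fixed u256 mask (which caps the allocator at 256 colors and finds a free register with @ctz(~mask)) to a dynamically sized bit set that is allocated once, sized to instr_count + 64, and cleared per instruction. Assuming Set is a dynamic bit set in the spirit of std.DynamicBitSetUnmanaged (Block.setMasks is a codebase-specific helper for reaching the underlying mask words), the first-free-color search corresponds to this sketch:

const std = @import("std");

test "pick the first color not used by interfering neighbors" {
    var arena = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena.deinit();

    // One bit per possible color, sized generously like `instr_count + 64`.
    var used = try std.DynamicBitSetUnmanaged.initEmpty(arena.allocator(), 8 + 64);

    // Colors already taken by neighbors in the interference row.
    used.set(0);
    used.set(1);
    used.set(3);

    // Equivalent of `@ctz(~mask)`: the first unset bit is the first free color.
    var it = used.iterator(.{ .kind = .unset });
    const color: u16 = @intCast(it.next().?);
    try std.testing.expectEqual(@as(u16, 2), color);
}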
@@ -227,6 +227,7 @@ pub fn init(
     var lexer = Lexer.init(opts.code, 0);
 
     var parser = Parser{
+        .stack_base = @frameAddress(),
         .arena = arena,
         .path = opts.path,
         .current = opts.current,
@@ -25,9 +25,12 @@ diagnostics: std.io.AnyWriter,
 list_pos: Ast.Pos = undefined,
 deferring: bool = false,
 errored: bool = false,
+stack_base: usize,
+
+const stack_limit = 1024 * (512 + 256);
 
 const Parser = @This();
-const Error = error{UnexpectedToken} || std.mem.Allocator.Error;
+const Error = error{ UnexpectedToken, StackOverflow } || std.mem.Allocator.Error;
 
 const Sym = struct {
     id: Ident,
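The new stack_limit constant evaluates to 1024 * (512 + 256) = 786,432 bytes, i.e. a 768 KiB stack budget for the recursive-descent parser, and exceeding it is now reported through the new error.StackOverflow member of the parser's Error set instead of crashing the process. A trivial stand-alone check of that arithmetic:

const std = @import("std");

test "stack_limit is a 768 KiB budget" {
    const stack_limit = 1024 * (512 + 256);
    try std.testing.expect(stack_limit == 768 * 1024);
    try std.testing.expect(stack_limit == 786_432);
}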
@@ -286,7 +289,18 @@ fn parseCtorField(self: *Parser) Error!Ast.CtorField {
     };
 }
 
+fn checkStack(self: *Parser) !void {
+    const distance = @abs(@as(isize, @bitCast(@frameAddress() -% self.stack_base)));
+    if (distance > stack_limit) {
+        self.report(self.cur.pos, "the tree is too deep", .{});
+        return error.StackOverflow;
+    }
+    //std.debug.print("{}\n", .{distance});
+}
+
 fn parseUnitWithoutTail(self: *Parser) Error!Id {
+    try self.checkStack();
+
     var token = self.advance();
     const scope_frame = self.active_syms.items.len;
     return try self.store.allocDyn(self.arena.allocator(), switch (token.kind.expand()) {
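The guard records the frame address of init in stack_base and, at the top of the recursive parse entry point, measures how far the current frame has drifted from it. The wrapping subtraction -% plus @abs of the bit-cast isize makes the distance direction-agnostic, so it works whether the stack grows up or down. A self-contained sketch of the same idea, independent of the Parser type (DepthGuard, recurse, and the padding are illustrative, not from the repository):

const std = @import("std");

const stack_limit = 1024 * (512 + 256); // 768 KiB budget

const DepthGuard = struct {
    stack_base: usize,

    // Distance between the current frame and the recorded base,
    // regardless of stack growth direction.
    fn distance(self: DepthGuard) usize {
        return @abs(@as(isize, @bitCast(@frameAddress() -% self.stack_base)));
    }

    fn check(self: DepthGuard) error{StackOverflow}!void {
        if (self.distance() > stack_limit) return error.StackOverflow;
    }
};

fn recurse(guard: DepthGuard, depth: usize) error{StackOverflow}!usize {
    try guard.check(); // bail out before the OS stack is exhausted
    // Keep a per-frame buffer alive so each call genuinely consumes stack.
    var pad: [128]u8 = undefined;
    pad[0] = @truncate(depth);
    std.mem.doNotOptimizeAway(&pad);
    if (depth == 0) return 0;
    return 1 + try recurse(guard, depth - 1);
}

test "deep recursion is rejected instead of crashing" {
    const guard = DepthGuard{ .stack_base = @frameAddress() };
    try std.testing.expectError(error.StackOverflow, recurse(guard, 1_000_000));
}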
@@ -259,6 +259,7 @@ pub const Id = enum(IdRepr) {
     }
 
     pub fn size(self: Id, types: *Types) u64 {
+        // TODO: what about uninhabited types?
         return switch (self.data()) {
             .Builtin => |b| switch (b) {
                 .never, .any => 0,
@@ -278,7 +279,7 @@ pub const Id = enum(IdRepr) {
             .Tuple => |t| {
                 var total_size: u64 = 0;
                 var alignm: u64 = 1;
-                for (types.store.get(t).fields) |f| {
+                for (t.getFields(types)) |f| {
                     alignm = @max(alignm, f.ty.alignment(types));
                     total_size = std.mem.alignForward(u64, total_size, f.ty.alignment(types));
                     total_size += f.ty.size(types);
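The size computation itself is the usual C-style layout: walk the fields in order, bump the running offset up to each field's alignment, add the field's size, and track the maximum alignment (the hunk is cut off before the tail, but the total is presumably rounded up to that alignment at the end). A worked stand-alone example of the same arithmetic, with hypothetical field sizes not tied to the Types store:

const std = @import("std");

const Field = struct { size: u64, alignment: u64 };

// C-style struct layout: align each field's offset, then add its size.
// Assumes the total is rounded up to the max field alignment at the end.
fn tupleSize(fields: []const Field) u64 {
    var total_size: u64 = 0;
    var alignm: u64 = 1;
    for (fields) |f| {
        alignm = @max(alignm, f.alignment);
        total_size = std.mem.alignForward(u64, total_size, f.alignment);
        total_size += f.size;
    }
    return std.mem.alignForward(u64, total_size, alignm);
}

test "u8 followed by u32 pads to 8 bytes" {
    const fields = [_]Field{
        .{ .size = 1, .alignment = 1 }, // u8 at offset 0
        .{ .size = 4, .alignment = 4 }, // u32 padded to offset 4
    };
    try std.testing.expectEqual(@as(u64, 8), tupleSize(&fields));
}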
@@ -1,2 +1 @@
 const utils = @import("utils");
-