fixed more bugs

Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
This commit is contained in:
Jakub Doka 2025-03-17 17:02:46 +01:00
parent cfb17ed01b
commit a23c48d29e
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
15 changed files with 304 additions and 235 deletions

View file

@ -80,7 +80,7 @@ pub fn build(b: *std.Build) !void {
const out = run_gen.addOutputFileArg("tests.zig");
const test_run = b.addTest(.{
.name = "vendored_tests",
.name = "example_tests",
.root_source_file = out,
.target = b.graph.host,
.optimize = optimize,
@ -184,10 +184,6 @@ pub fn build(b: *std.Build) !void {
run_gen_finding_tests.addArg("enabled");
const fuzz_out = run_gen_finding_tests.addOutputFileArg("fuzz_finding_tests.zig");
const cleanup = b.addSystemCommand(&.{ "killall", "afl-fuzz" });
if (fuzzes != 1) run_gen_finding_tests.step.dependOn(&cleanup.step);
run_gen_finding_tests.has_side_effects = true;
for (0..fuzzes) |i| {
const run_afl = b.addSystemCommand(&.{"afl-fuzz"});
@ -208,8 +204,6 @@ pub fn build(b: *std.Build) !void {
if (i != 0) _ = run_afl.captureStdOut();
run_gen_finding_tests.step.dependOn(&run_afl.step);
if (i == 0 and fuzzes != 1) cleanup.step.dependOn(&run_afl.step);
}
const fuzz_step = b.step("fuzz", "run the fuzzer");

8
foo.sh
View file

@ -1,8 +0,0 @@
afl-fuzz -i /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/fuzz-cases -o /home/mlokis/personal/zig/hblang/.zig-cache/o/a1c0c4a0024f9744bb088d2785d0c92d/findings -S worker1 -x /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/hblang.dict -- /home/mlokis/personal/zig/hblang/.zig-cache/o/dc1b80ba16a50db4138d4ff92006b073/fuzz &
afl-fuzz -i /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/fuzz-cases -o /home/mlokis/personal/zig/hblang/.zig-cache/o/a1c0c4a0024f9744bb088d2785d0c92d/findings -S worker2 -x /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/hblang.dict -- /home/mlokis/personal/zig/hblang/.zig-cache/o/dc1b80ba16a50db4138d4ff92006b073/fuzz &
afl-fuzz -i /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/fuzz-cases -o /home/mlokis/personal/zig/hblang/.zig-cache/o/a1c0c4a0024f9744bb088d2785d0c92d/findings -S worker3 -x /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/hblang.dict -- /home/mlokis/personal/zig/hblang/.zig-cache/o/dc1b80ba16a50db4138d4ff92006b073/fuzz &
afl-fuzz -i /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/fuzz-cases -o /home/mlokis/personal/zig/hblang/.zig-cache/o/a1c0c4a0024f9744bb088d2785d0c92d/findings -M worker0 -x /home/mlokis/personal/zig/hblang/.zig-cache/o/0e04107b2ba7ec222ca565c88459075b/hblang.dict -- /home/mlokis/personal/zig/hblang/.zig-cache/o/dc1b80ba16a50db4138d4ff92006b073/fuzz &
sleep 5
killall afl-fuzz

View file

@ -9,7 +9,6 @@ if [[ -n "$IN_SCRIPT" ]]; then
if ! command -v zig >/dev/null 2>&1; then
wget https://ziglang.org/download/0.14.0/zig-linux-x86_64-0.14.0.tar.xz
tar -xf zig-linux-x86_64-0.14.0.tar.xz
ln -s /root/zig-linux-x86_64-0.14.0/zig /usr/bin/
fi
@ -43,8 +42,8 @@ if [[ -n "$IN_SCRIPT" ]]; then
systemctl daemon-reload
systemctl enable $SERVICE_NAME
systemctl stop $SERVICE_NAME
systemctl start $SERVICE_NAME
systemctl stop $SERVICE_NAME
systemctl start $SERVICE_NAME
exit
fi

View file

@ -87,9 +87,9 @@ pub const EmitOptions = struct {
entry: bool = false,
optimizations: struct {
verbose: bool = false,
dead_code_fuel: usize = 1000,
dead_code_fuel: usize = 10000,
mem2reg: bool = true,
peephole_fuel: usize = 1000,
peephole_fuel: usize = 10000,
do_gcm: bool = true,
arena: ?*root.Arena = null,
error_buf: ?*std.ArrayListUnmanaged(static_anal.Error) = null,

View file

@ -651,7 +651,7 @@ pub fn Func(comptime MachNode: type) type {
}
pub fn kill(self: *Node) void {
if (self.output_len != 0) utils.panic("{s}\n", .{self.outputs()});
if (self.output_len != 0) utils.panic("{any} {}\n", .{ self.outputs(), self });
std.debug.assert(self.output_len == 0);
for (self.inputs()) |oi| if (oi) |i| {
i.removeUse(self);
@ -961,23 +961,38 @@ pub fn Func(comptime MachNode: type) type {
}
pub fn subsumeNoKill(self: *Self, this: *Node, target: *Node) void {
std.debug.assert(this != target);
//std.debug.print("{} {} {any}\n", .{ target, this, target.outputs() });
for (self.arena.allocator().dupe(*Node, target.outputs()) catch unreachable) |use| {
if (use.id == std.math.maxInt(u16)) continue;
const index = std.mem.indexOfScalar(?*Node, use.inputs(), target) orelse {
utils.panic("{} {any} {}", .{ this, target.outputs(), use });
};
_ = self.setInput(use, index, this);
if (use == target) {
target.inputs()[index] = null;
target.removeUse(target);
} else {
_ = self.setInput(use, index, this);
}
}
var iter = self.interner.iterator();
while (iter.next()) |e| std.debug.assert(e.key_ptr.node.id != std.math.maxInt(u16));
if (@import("builtin").mode == .Debug) {
var iter = self.interner.iterator();
while (iter.next()) |e| std.debug.assert(e.key_ptr.node.id != std.math.maxInt(u16));
}
//if (target.outputs().len != 0)
// utils.panic("-- {any}\n", .{target.outputs()})
//else
// std.debug.print("--\n", .{});
}
pub fn subsume(self: *Self, this: *Node, target: *Node) void {
if (this.sloc == Sloc.none) this.sloc = target.sloc;
self.subsumeNoKill(this, target);
self.uninternNode(target);
self.subsumeNoKill(this, target);
target.kill();
}

View file

@ -484,10 +484,10 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
Filled: Ast.Id,
};
const fields = @TypeOf(struct_ty).Data.getFields(struct_ty, self.types);
const fields = struct_ty.getFields(self.types);
const slots = tmp.arena.alloc(FillSlot, fields.len);
{
var iter = @TypeOf(struct_ty).Data.offsetIter(struct_ty, self.types);
var iter = struct_ty.offsetIter(self.types);
iter.offset = offset_cursor;
for (slots) |*s| {
const elem = iter.next().?;
@ -543,7 +543,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
return self.report(expr, "union constructor must initialize only one field", .{});
}
const fields = @TypeOf(union_ty).Data.getFields(union_ty, self.types);
const fields = union_ty.getFields(self.types);
const field_ast = ast.exprs.view(e.fields)[0];
const fname = ast.tokenSrc(field_ast.pos.index);
@ -608,15 +608,15 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
}
const struct_ty = ty.data().Struct;
if (e.fields.len() != @TypeOf(struct_ty).Data.getFields(struct_ty, self.types).len) {
if (e.fields.len() != struct_ty.getFields(self.types).len) {
return self.report(
e.pos,
"{} has {} fields, but tuple constructor has {} values",
.{ ty, @TypeOf(struct_ty).Data.getFields(struct_ty, self.types).len, e.fields.len() },
.{ ty, struct_ty.getFields(self.types).len, e.fields.len() },
);
}
var iter = @TypeOf(struct_ty).Data.offsetIter(struct_ty, self.types);
var iter = struct_ty.offsetIter(self.types);
iter.offset = init_offset;
for (ast.exprs.view(e.fields)) |field| {
const elem = iter.next().?;
@ -947,27 +947,27 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
switch (base.ty.data()) {
.Struct => |struct_ty| {
var iter = @TypeOf(struct_ty).Data.offsetIter(struct_ty, self.types);
var iter = struct_ty.offsetIter(self.types);
const ftype, const offset = while (iter.next()) |elem| {
if (std.mem.eql(u8, fname, elem.field.name)) break .{ elem.field.ty, elem.offset };
} else {
return self.report(
e.field,
"no such field on {}, but it has: {s}",
.{ base.ty, listFileds(tmp.arena, @TypeOf(struct_ty).Data.getFields(struct_ty, self.types)) },
.{ base.ty, listFileds(tmp.arena, struct_ty.getFields(self.types)) },
);
};
return .mkp(ftype, self.bl.addFieldOffset(base.id.Pointer, @intCast(offset)));
},
.Union => |union_ty| {
const ftype = for (@TypeOf(union_ty).Data.getFields(union_ty, self.types)) |tf| {
const ftype = for (union_ty.getFields(self.types)) |tf| {
if (std.mem.eql(u8, fname, tf.name)) break tf.ty;
} else {
return self.report(
e.field,
"no such field on {}, but it has: {s}",
.{ base.ty, listFileds(tmp.arena, @TypeOf(union_ty).Data.getFields(union_ty, self.types)) },
.{ base.ty, listFileds(tmp.arena, union_ty.getFields(self.types)) },
);
};
@ -1055,7 +1055,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
inline .Struct, .Tuple => |struct_ty| {
const idx = try self.partialEval(e.subscript, idx_value.getValue(self));
var iter = @TypeOf(struct_ty).Data.offsetIter(struct_ty, self.types);
var iter = struct_ty.offsetIter(self.types);
if (idx >= iter.fields.len) {
return self.report(e.subscript, "struct has only {} fields, but index is {}", .{ iter.fields.len, idx });
@ -1144,7 +1144,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
return self.report(e.value, "can only match on enums right now, {} is not", .{value.ty});
};
const fields = @TypeOf(value.ty.data().Enum).Data.getFields(value.ty.data().Enum, self.types);
const fields = value.ty.data().Enum.getFields(self.types);
if (fields.len == 0) return error.Unreachable;
@ -1510,6 +1510,19 @@ pub fn typeCheck(self: *Codegen, expr: anytype, got: *Value, expected: Types.Id)
) };
}
if (got.ty.data() == .Enum) {
const len = got.ty.data().Enum.getFields(self.types).len;
if (len <= 1) {
got.id = .{ .Value = self.bl.addIntImm(self.abiCata(expected).ByValue, 0) };
} else if (got.ty.size(self.types) < expected.size(self.types)) {
got.id = .{ .Value = self.bl.addUnOp(
.uext,
self.abiCata(expected).ByValue,
got.getValue(self),
) };
}
}
got.ty = expected;
}
@ -1554,7 +1567,7 @@ pub fn lexemeToBinOpLow(self: Lexer.Lexeme, ty: Types.Id) ?graph.BinOp {
fn emitStructFoldOp(self: *Codegen, pos: anytype, ty: utils.EntId(root.frontend.types.Struct), op: Lexer.Lexeme, lhs: *Node, rhs: *Node) !?*Node {
var fold: ?*Node = null;
var iter = @TypeOf(ty).Data.offsetIter(ty, self.types);
var iter = ty.offsetIter(self.types);
while (iter.next()) |elem| {
const lhs_loc = self.bl.addFieldOffset(lhs, @intCast(elem.offset));
const rhs_loc = self.bl.addFieldOffset(rhs, @intCast(elem.offset));
@ -1574,7 +1587,7 @@ fn emitStructFoldOp(self: *Codegen, pos: anytype, ty: utils.EntId(root.frontend.
}
fn emitStructOp(self: *Codegen, pos: anytype, ty: utils.EntId(root.frontend.types.Struct), op: Lexer.Lexeme, loc: *Node, lhs: *Node, rhs: *Node) !void {
var iter = @TypeOf(ty).Data.offsetIter(ty, self.types);
var iter = ty.offsetIter(self.types);
while (iter.next()) |elem| {
const field_loc = self.bl.addFieldOffset(loc, @intCast(elem.offset));
const lhs_loc = self.bl.addFieldOffset(lhs, @intCast(elem.offset));
@ -1602,7 +1615,12 @@ pub fn emitGenericStore(self: *Codegen, loc: *Node, value: *Value) void {
}
pub fn resolveAnonTy(self: *Codegen, expr: Ast.Id) !Types.Id {
return self.types.ct.evalTy("", .{ .Tmp = self }, expr);
const prev_name = self.name;
defer self.name = prev_name;
self.name = "";
var vl = try self.emitTyped(.{}, .type, expr);
return self.unwrapTyConst(expr, &vl);
}
pub fn resolveTy(self: *Codegen, name: []const u8, expr: Ast.Id) !Types.Id {
@ -1636,7 +1654,7 @@ pub fn lookupScopeItem(self: *Codegen, pos: Ast.Pos, bsty: Types.Id, name: []con
};
const ast = self.types.getFile(other_file);
if (bsty.data() == .Enum) {
const fields = @TypeOf(bsty.data().Enum).Data.getFields(bsty.data().Enum, self.types);
const fields = bsty.data().Enum.getFields(self.types);
for (fields, 0..) |f, i| {
if (std.mem.eql(u8, f.name, name))
@ -1658,6 +1676,11 @@ pub fn lookupScopeItem(self: *Codegen, pos: Ast.Pos, bsty: Types.Id, name: []con
pub fn resolveGlobal(self: *Codegen, name: []const u8, bsty: Types.Id, ast: *const Ast, decl: Ast.Id, path: []Ast.Pos) EmitError!Value {
const vari = ast.exprs.getTyped(.Decl, decl).?;
// NOTE: we do this here particularly because the explicit type can contain a cycle
try self.types.ct.addInProgress(vari.value, bsty.file(self.types).?);
defer _ = self.types.ct.in_progress.pop().?;
const ty = if (vari.ty.tag() == .Void) null else try self.resolveAnonTy(vari.ty);
const global_ty, const new = self.types.resolveGlobal(bsty, name, vari.value);
@ -1810,7 +1833,7 @@ pub fn instantiateTemplate(
caller: ?Value,
tmp: *utils.Arena,
expr: Ast.Id,
e: std.meta.TagPayload(Ast.Expr, .Call),
e: Ast.Store.TagPayload(.Call),
typ: Types.Id,
) !struct { []Value, Types.Id } {
const tmpl = self.types.store.get(typ.data().Template).*;
@ -2296,7 +2319,7 @@ fn emitDirective(self: *Codegen, ctx: Ctx, expr: Ast.Id, e: *const Ast.Store.Tag
break :dt std.fmt.allocPrint(self.types.arena.allocator(), "{}", .{ty.fmt(self.types)}) catch unreachable;
} else switch (value.ty.data()) {
.Enum => |enum_ty| dt: {
const fields = @TypeOf(enum_ty).Data.getFields(enum_ty, self.types);
const fields = enum_ty.getFields(self.types);
if (fields.len == 1) {
break :dt fields[0].name;
}

View file

@ -299,6 +299,20 @@ pub fn inferType(self: *Comptime, name: []const u8, scope: Codegen.Scope, ctx: C
})[1];
}
pub fn addInProgress(self: *Comptime, expr: Ast.Id, file: Types.File) !void {
const types = self.getTypes();
for (self.in_progress.items, 0..) |p, i| {
if (std.meta.eql(p, .{ .ast = expr, .file = file })) {
for (self.in_progress.items[i..]) |lc| {
types.report(lc.file, lc.ast, "cycle goes through here", .{});
}
return error.Never;
}
}
self.in_progress.append(self.comptime_code.gpa, .{ .ast = expr, .file = file }) catch unreachable;
}
pub fn jitExprLow(
self: *Comptime,
name: []const u8,
@ -316,17 +330,6 @@ pub fn jitExprLow(
var gen = Codegen.init(self.getGpa(), tmp.arena, types, .@"comptime");
defer gen.deinit();
for (self.in_progress.items, 0..) |p, i| {
if (std.meta.eql(p, .{ .ast = value, .file = scope.file(types) })) {
for (self.in_progress.items[i..]) |lc| {
types.report(lc.file, lc.ast, "cycle goes trough here", .{});
}
return error.Never;
}
}
self.in_progress.append(self.comptime_code.gpa, .{ .ast = value, .file = scope.file(types) }) catch unreachable;
defer _ = self.in_progress.pop().?;
gen.only_inference = only_inference;
const reloc_frame = self.comptime_code.global_relocs.items.len;

View file

@ -91,21 +91,6 @@ pub const TypeCtx = struct {
pub const File = enum(u16) { root, _ };
pub const IdRepr = u32;
pub const IdTagRepr = u4;
pub const IfPayloadRepr = u28;
pub const ids = enum {
pub const Ptr = enum(IdRepr) { _ };
pub const Slice = enum(IdRepr) { _ };
pub const Nullable = enum(IdRepr) { _ };
pub const Tuple = enum(IdRepr) { _ };
pub const Enum = enum(IdRepr) { _ };
pub const Union = enum(IdRepr) { _ };
pub const Struct = enum(IdRepr) { _ };
pub const Template = enum(IdRepr) { _ };
pub const Func = enum(IdRepr) { _ };
pub const Global = enum(IdRepr) { _ };
};
pub const Data = utils.EntStore(root.frontend.types).Data;
@ -173,7 +158,7 @@ pub const Id = enum(IdRepr) {
}
pub fn needsTag(self: Id, types: *Types) bool {
return self.data() == .Nullable and !types.store.get(self.data().Nullable).isCompact(types);
return self.data() == .Nullable and !self.data().Nullable.isCompact(types);
}
pub fn firstType(self: Id, types: *Types) Id {
@ -258,7 +243,7 @@ pub const Id = enum(IdRepr) {
pub fn len(self: Id, types: *Types) ?usize {
return switch (self.data()) {
inline .Struct, .Union, .Enum => |s| @TypeOf(s).Data.getFields(s, types).len,
inline .Struct, .Union, .Enum => |s| s.getFields(types).len,
.Slice => |s| types.store.get(s).len,
else => null,
};
@ -285,7 +270,7 @@ pub const Id = enum(IdRepr) {
},
.Pointer => 8,
.Enum => |e| {
const var_count = @TypeOf(e).Data.getFields(e, types).len;
const var_count = e.getFields(types).len;
if (var_count <= 1) return 0;
return std.math.ceilPowerOfTwo(u64, std.mem.alignForward(u64, std.math.log2_int(u64, var_count), 8) / 8) catch unreachable;
},
@ -303,16 +288,16 @@ pub const Id = enum(IdRepr) {
.Union => |u| {
var max_size: u64 = 0;
var alignm: u64 = 1;
for (@TypeOf(u).Data.getFields(u, types)) |f| {
for (u.getFields(types)) |f| {
alignm = @max(alignm, f.ty.alignment(types));
max_size = @max(max_size, f.ty.size(types));
}
max_size = std.mem.alignForward(u64, max_size, alignm);
return max_size;
},
.Struct => |s| @TypeOf(s).Data.getSize(s, types),
.Struct => |s| s.getSize(types),
.Slice => |s| if (types.store.get(s).len) |l| l * types.store.get(s).elem.size(types) else 16,
.Nullable => |n| types.store.get(n).size(types),
.Nullable => |n| n.size(types),
.Global, .Func, .Template => 0,
};
}
@ -322,10 +307,10 @@ pub const Id = enum(IdRepr) {
.Builtin, .Enum => @max(1, self.size(types)),
.Pointer => 8,
.Nullable => |n| types.store.get(n).inner.alignment(types),
.Struct => |s| @TypeOf(s).Data.getAlignment(s, types),
.Struct => |s| s.getAlignment(types),
inline .Union, .Tuple => |s| {
var alignm: u64 = 1;
for (@TypeOf(s).Data.getFields(s, types)) |f| {
for (s.getFields(types)) |f| {
alignm = @max(alignm, f.ty.alignment(types));
}
return alignm;
@ -517,7 +502,7 @@ pub const Abi = enum {
pub fn categorizeAbleosNullable(id: utils.EntId(tys.Nullable), types: *Types) ?Spec {
const nullable = types.store.get(id);
const base_abi = Abi.ableos.categorize(nullable.inner, types) orelse return null;
if (nullable.isCompact(types)) return base_abi;
if (id.isCompact(types)) return base_abi;
if (base_abi == .Imaginary) return .{ .ByValue = .i8 };
if (base_abi == .ByValue) return .{ .ByValuePair = .{
.types = .{ .i8, base_abi.ByValue },
@ -538,7 +523,7 @@ pub const Abi = enum {
}
pub fn categorizeAbleosUnion(id: utils.EntId(tys.Union), types: *Types) ?Spec {
const fields = @TypeOf(id).Data.getFields(id, types);
const fields = id.getFields(types);
if (fields.len == 0) return .Imaginary; // TODO: add .Impossible
const res = Abi.ableos.categorize(fields[0].ty, types) orelse return null;
for (fields[1..]) |f| {
@ -551,16 +536,16 @@ pub const Abi = enum {
pub fn categorizeAbleosRecord(stru: anytype, types: *Types) Spec {
var res: Spec = .Imaginary;
var offset: u64 = 0;
for (@TypeOf(stru).Data.getFields(stru, types)) |f| {
for (stru.getFields(types)) |f| {
const fspec = Abi.ableos.categorize(f.ty, types) orelse continue;
if (fspec == .Imaginary) continue;
if (fspec == .ByRef) return fspec;
if (res == .Imaginary) {
res = fspec;
offset += f.ty.size(types);
continue;
}
if (fspec == .ByRef) return fspec;
if (fspec == .ByValuePair) return .ByRef;
if (res == .ByValuePair) return .ByRef;
std.debug.assert(res != .ByRef);

View file

@ -60,14 +60,24 @@ pub const Nullable = struct {
}
} = .unresolved,
pub fn isCompact(self: *Nullable, types: *Types) bool {
return self.nieche.offset(types) != null;
}
pub const Id = enum(u32) {
_,
pub fn size(self: *Nullable, types: *Types) u64 {
return self.inner.size(types) +
if (self.isCompact(types)) 0 else self.inner.alignment(types);
}
pub const Data = Nullable;
pub fn isCompact(id: Id, types: *Types) bool {
const self = types.store.get(id);
return self.nieche.offset(types) != null;
}
pub fn size(id: Id, types: *Types) u64 {
const self = types.store.get(id);
return self.inner.size(types) +
if (id.isCompact(types)) 0 else self.inner.alignment(types);
}
};
pub const NiecheSpec = packed struct(u64) {
kind: enum(u1) {
@ -91,9 +101,22 @@ pub const Tuple = struct {
ty: TyId,
};
pub fn getFields(id: utils.EntId(Tuple), types: *Types) []Field {
return types.store.get(id).fields;
}
pub const Id = enum(u32) {
_,
pub const Data = Tuple;
pub fn getFields(id: Id, types: *Types) []Field {
return types.store.get(id).fields;
}
pub fn offsetIter(id: Id, types: *Types) OffIter {
return .{
.types = types,
.fields = getFields(id, types),
};
}
};
pub const OffIter = struct {
types: *Types,
@ -111,13 +134,6 @@ pub const Tuple = struct {
return elem;
}
};
pub fn offsetIter(id: utils.EntId(Tuple), types: *Types) OffIter {
return .{
.types = types,
.fields = getFields(id, types),
};
}
};
pub const Enum = struct {
@ -129,26 +145,32 @@ pub const Enum = struct {
name: []const u8,
};
pub fn getFields(id: utils.EntId(Enum), types: *Types) []const Field {
const self = types.store.get(id);
pub const Id = enum(u32) {
_,
if (self.fields) |f| return f;
const ast = types.getFile(self.key.file);
const enum_ast = ast.exprs.getTyped(.Enum, self.key.ast).?;
pub const Data = Enum;
var count: usize = 0;
for (ast.exprs.view(enum_ast.fields)) |f| count += @intFromBool(f.tag() == .Tag);
pub fn getFields(id: Id, types: *Types) []const Field {
const self = types.store.get(id);
const fields = types.arena.alloc(Field, count);
var i: usize = 0;
for (ast.exprs.view(enum_ast.fields)) |fast| {
if (fast.tag() != .Tag) continue;
fields[i] = .{ .name = ast.tokenSrc(ast.exprs.getTyped(.Tag, fast).?.index + 1) };
i += 1;
if (self.fields) |f| return f;
const ast = types.getFile(self.key.file);
const enum_ast = ast.exprs.getTyped(.Enum, self.key.ast).?;
var count: usize = 0;
for (ast.exprs.view(enum_ast.fields)) |f| count += @intFromBool(f.tag() == .Tag);
const fields = types.arena.alloc(Field, count);
var i: usize = 0;
for (ast.exprs.view(enum_ast.fields)) |fast| {
if (fast.tag() != .Tag) continue;
fields[i] = .{ .name = ast.tokenSrc(ast.exprs.getTyped(.Tag, fast).?.index + 1) };
i += 1;
}
self.fields = fields;
return fields;
}
self.fields = fields;
return fields;
}
};
};
pub const Union = struct {
@ -161,30 +183,37 @@ pub const Union = struct {
ty: TyId,
};
pub fn getFields(id: utils.EntId(Union), types: *Types) []const Field {
const self = types.store.get(id);
pub const Id = enum(u32) {
_,
if (self.fields) |f| return f;
const ast = types.getFile(self.key.file);
const union_ast = ast.exprs.getTyped(.Union, self.key.ast).?;
pub const Data = Union;
var count: usize = 0;
for (ast.exprs.view(union_ast.fields)) |f| count += @intFromBool(if (ast.exprs.getTyped(.Decl, f)) |b| b.bindings.tag() == .Tag else false);
pub fn getFields(id: Id, types: *Types) []const Field {
const self = types.store.get(id);
const fields = types.arena.alloc(Field, count);
var i: usize = 0;
for (ast.exprs.view(union_ast.fields)) |fast| {
const field = ast.exprs.getTyped(.Decl, fast) orelse continue;
if (field.bindings.tag() != .Tag) continue;
fields[i] = .{
.name = ast.tokenSrc(ast.exprs.getTyped(.Tag, field.bindings).?.index + 1),
.ty = types.ct.evalTy("", .{ .Perm = .init(.{ .Union = id }) }, field.ty) catch .never,
};
i += 1;
if (self.fields) |f| return f;
const ast = types.getFile(self.key.file);
const union_ast = ast.exprs.getTyped(.Union, self.key.ast).?;
var count: usize = 0;
for (ast.exprs.view(union_ast.fields)) |f| count +=
@intFromBool(if (ast.exprs.getTyped(.Decl, f)) |b| b.bindings.tag() == .Tag else false);
const fields = types.arena.alloc(Field, count);
var i: usize = 0;
for (ast.exprs.view(union_ast.fields)) |fast| {
const field = ast.exprs.getTyped(.Decl, fast) orelse continue;
if (field.bindings.tag() != .Tag) continue;
fields[i] = .{
.name = ast.tokenSrc(ast.exprs.getTyped(.Tag, field.bindings).?.index + 1),
.ty = types.ct.evalTy("", .{ .Perm = .init(.{ .Union = id }) }, field.ty) catch .never,
};
i += 1;
}
self.fields = fields;
return fields;
}
self.fields = fields;
return fields;
}
};
};
pub const Struct = struct {
@ -200,104 +229,110 @@ pub const Struct = struct {
defalut_value: ?utils.EntId(Global) = null,
};
pub fn getSize(id: utils.EntId(Struct), types: *Types) u64 {
const self = types.store.get(id);
pub const Id = enum(u32) {
_,
if (self.size) |a| return a;
pub const Data = Struct;
if (@hasField(Field, "alignment")) @compileError("");
const max_alignment = getAlignment(id, types);
pub fn getSize(id: Id, types: *Types) u64 {
const self = types.store.get(id);
var siz: u64 = 0;
for (getFields(id, types)) |f| {
siz = std.mem.alignForward(u64, siz, @min(max_alignment, f.ty.alignment(types)));
siz += f.ty.size(types);
}
siz = std.mem.alignForward(u64, siz, max_alignment);
return siz;
}
if (self.size) |a| return a;
pub fn getAlignment(id: utils.EntId(Struct), types: *Types) u64 {
const self = types.store.get(id);
if (@hasField(Field, "alignment")) @compileError("");
const max_alignment = getAlignment(id, types);
if (self.alignment) |a| return a;
const ast = types.getFile(self.key.file);
const struct_ast = ast.exprs.getTyped(.Struct, self.key.ast).?;
if (struct_ast.alignment.tag() != .Void) {
if (@hasField(Field, "alignment")) @compileError("assert fields <= alignment then base alignment");
self.alignment = @bitCast(types.ct.evalIntConst(.{ .Perm = .init(.{ .Struct = id }) }, struct_ast.alignment) catch 1);
return self.alignment.?;
var siz: u64 = 0;
for (getFields(id, types)) |f| {
siz = std.mem.alignForward(u64, siz, @min(max_alignment, f.ty.alignment(types)));
siz += f.ty.size(types);
}
siz = std.mem.alignForward(u64, siz, max_alignment);
return siz;
}
var alignm: u64 = 1;
for (getFields(id, types)) |f| {
alignm = @max(alignm, f.ty.alignment(types));
pub fn getAlignment(id: Id, types: *Types) u64 {
const self = types.store.get(id);
if (self.alignment) |a| return a;
const ast = types.getFile(self.key.file);
const struct_ast = ast.exprs.getTyped(.Struct, self.key.ast).?;
if (struct_ast.alignment.tag() != .Void) {
if (@hasField(Field, "alignment")) @compileError("assert fields <= alignment then base alignment");
self.alignment = @bitCast(types.ct.evalIntConst(.{ .Perm = .init(.{ .Struct = id }) }, struct_ast.alignment) catch 1);
return self.alignment.?;
}
var alignm: u64 = 1;
for (getFields(id, types)) |f| {
alignm = @max(alignm, f.ty.alignment(types));
}
return alignm;
}
return alignm;
}
pub const OffIter = struct {
types: *Types,
max_align: u64,
fields: []const Field,
offset: u64 = 0,
pub const OffIter = struct {
types: *Types,
max_align: u64,
fields: []const Field,
offset: u64 = 0,
pub const Elem = struct { field: *const Field, offset: u64 };
pub const Elem = struct { field: *const Field, offset: u64 };
pub fn next(self: *OffIter) ?Elem {
if (self.fields.len == 0) return null;
self.offset = std.mem.alignForward(u64, self.offset, @min(self.max_align, self.fields[0].ty.alignment(self.types)));
const elem = Elem{ .field = &self.fields[0], .offset = self.offset };
self.fields = self.fields[1..];
self.offset += elem.field.ty.size(self.types);
return elem;
pub fn next(self: *OffIter) ?Elem {
if (self.fields.len == 0) return null;
self.offset = std.mem.alignForward(u64, self.offset, @min(self.max_align, self.fields[0].ty.alignment(self.types)));
const elem = Elem{ .field = &self.fields[0], .offset = self.offset };
self.fields = self.fields[1..];
self.offset += elem.field.ty.size(self.types);
return elem;
}
};
pub fn offsetIter(id: Id, types: *Types) OffIter {
return .{ .types = types, .fields = getFields(id, types), .max_align = getAlignment(id, types) };
}
pub fn getFields(id: Id, types: *Types) []const Field {
const self = types.store.get(id);
if (self.fields) |f| return f;
const ast = types.getFile(self.key.file);
const struct_ast = ast.exprs.getTyped(.Struct, self.key.ast).?;
var count: usize = 0;
for (ast.exprs.view(struct_ast.fields)) |f| count += @intFromBool(if (ast.exprs.getTyped(.Decl, f)) |b| b.bindings.tag() == .Tag else false);
const fields = types.arena.alloc(Field, count);
var i: usize = 0;
for (ast.exprs.view(struct_ast.fields)) |fast| {
const field = ast.exprs.getTyped(.Decl, fast) orelse continue;
if (field.bindings.tag() != .Tag) continue;
const name = ast.tokenSrc(ast.exprs.getTyped(.Tag, field.bindings).?.index + 1);
const ty = types.ct.evalTy("", .{ .Perm = .init(.{ .Struct = id }) }, field.ty) catch .never;
fields[i] = .{ .name = name, .ty = ty };
if (field.value.tag() != .Void) {
const value = types.store.add(types.arena.allocator(), Global{
.key = .{
.file = self.key.file,
.name = name,
.scope = .init(.{ .Struct = id }),
.ast = field.value,
.captures = &.{},
},
});
types.ct.evalGlobal(name, value, ty, field.value) catch {};
fields[i].defalut_value = value;
}
i += 1;
}
self.fields = fields;
return fields;
}
};
pub fn offsetIter(id: utils.EntId(Struct), types: *Types) OffIter {
return .{ .types = types, .fields = getFields(id, types), .max_align = getAlignment(id, types) };
}
pub fn getFields(id: utils.EntId(Struct), types: *Types) []const Field {
const self = types.store.get(id);
if (self.fields) |f| return f;
const ast = types.getFile(self.key.file);
const struct_ast = ast.exprs.getTyped(.Struct, self.key.ast).?;
var count: usize = 0;
for (ast.exprs.view(struct_ast.fields)) |f| count += @intFromBool(if (ast.exprs.getTyped(.Decl, f)) |b| b.bindings.tag() == .Tag else false);
const fields = types.arena.alloc(Field, count);
var i: usize = 0;
for (ast.exprs.view(struct_ast.fields)) |fast| {
const field = ast.exprs.getTyped(.Decl, fast) orelse continue;
if (field.bindings.tag() != .Tag) continue;
const name = ast.tokenSrc(ast.exprs.getTyped(.Tag, field.bindings).?.index + 1);
const ty = types.ct.evalTy("", .{ .Perm = .init(.{ .Struct = id }) }, field.ty) catch .never;
fields[i] = .{ .name = name, .ty = ty };
if (field.value.tag() != .Void) {
const value = types.store.add(types.arena.allocator(), Global{
.key = .{
.file = self.key.file,
.name = name,
.scope = .init(.{ .Struct = id }),
.ast = field.value,
.captures = &.{},
},
});
types.ct.evalGlobal(name, value, ty, field.value) catch {};
fields[i].defalut_value = value;
}
i += 1;
}
self.fields = fields;
return fields;
}
};
pub const Template = struct {

View file

@ -1 +1,2 @@
const utils = @import("utils");

View file

@ -179,12 +179,14 @@ pub fn emitFunc(self: *HbvmGen, func: *Func, opts: Mach.EmitOptions) void {
if (bb.base.kind == .CallEnd) break false;
} else true;
const reg_shift: u8 = 1; //if (is_tail) 12 else 32;
const reg_shift: u8 = 1;
for (self.allocs) |*r| r.* += reg_shift;
const used_registers = if (self.allocs.len == 0) 0 else @min(std.mem.max(u16, self.allocs), max_alloc_regs) -| 31;
const max_reg = std.mem.max(u16, self.allocs);
const used_registers = if (self.allocs.len == 0) 0 else @min(max_reg, max_alloc_regs) -|
(@intFromEnum(isa.Reg.ret_addr) - @intFromBool(is_tail));
const used_reg_size = @as(u16, (used_registers + @intFromBool(!is_tail))) * 8;
const spill_count = (std.mem.max(u16, self.allocs) -| max_alloc_regs) * 8;
const spill_count = (max_reg -| max_alloc_regs) * 8;
var local_size: i64 = 0;
if (func.root.outputs().len > 1) {

View file

@ -7,7 +7,20 @@ pub const fuzz = @import("fuzz.zig");
comptime {
@setEvalBranchQuota(2000);
std.testing.refAllDeclsRecursive(@This());
refAllDeclsRecursive(@This(), 10);
}
pub fn refAllDeclsRecursive(comptime T: type, depth: usize) void {
if (depth == 0) return;
inline for (comptime std.meta.declarations(T)) |decl| {
if (@TypeOf(@field(T, decl.name)) == type) {
switch (@typeInfo(@field(T, decl.name))) {
.@"struct", .@"enum", .@"union", .@"opaque" => refAllDeclsRecursive(@field(T, decl.name), depth - 1),
else => {},
}
}
_ = &@field(T, decl.name);
}
}
var ran = false;
@ -38,7 +51,7 @@ pub fn runTest(name: []const u8, code: [:0]const u8) !void {
}
pub fn runFuzzFindingTest(name: []const u8, code: []const u8) !void {
utils.Arena.initScratch(1024 * 1024);
utils.Arena.initScratch(1024 * 1024 * 10);
defer utils.Arena.deinitScratch();
const gpa = std.testing.allocator;
@ -46,9 +59,9 @@ pub fn runFuzzFindingTest(name: []const u8, code: []const u8) !void {
std.debug.print("{s}\n", .{code});
//errdefer {
// const stderr = std.io.getStdErr();
// const colors = std.io.tty.detectConfig(stderr);
// test_util.testBuilder(name, code, gpa, stderr.writer().any(), colors, true) catch {};
//const stderr = std.io.getStdErr();
//const colors = std.io.tty.detectConfig(stderr);
//test_util.testBuilder(name, code, gpa, stderr.writer().any(), colors, true) catch {};
//}
try test_util.testBuilder(name, code, gpa, std.io.null_writer.any(), .no_color, false);

View file

@ -408,6 +408,7 @@ pub fn EntStore(comptime M: type) type {
} });
};
const store_fields = std.meta.fields(Store);
const data_fields = std.meta.fields(Data);
const Self = @This();
pub inline fn isValid(self: *Self, comptime kind: Tag, idx: usize) bool {
@ -435,19 +436,25 @@ pub fn EntStore(comptime M: type) type {
return &@field(self.rpr, fieldName(@TypeOf(id).Data).name).items[@intFromEnum(id)];
}
pub inline fn unwrap(self: *Self, id: Data, comptime kind: Tag) ?*if (@hasDecl(std.meta.TagPayload(Data, kind), "identity"))
std.meta.TagPayload(Data, kind)
pub fn TagPayload(comptime kind: Tag) type {
return data_fields[@intFromEnum(kind)].type;
}
pub inline fn unwrap(self: *Self, id: Data, comptime kind: Tag) ?*if (@hasDecl(TagPayload(kind), "identity"))
TagPayload(kind)
else
std.meta.TagPayload(Data, kind).Data {
TagPayload(kind).Data {
if (id != kind) return null;
const i = @field(id, @tagName(kind));
if (@hasDecl(std.meta.TagPayload(Data, kind), "identity")) return i;
if (@hasDecl(TagPayload(kind), "identity")) return i;
return &@field(self.rpr, @tagName(kind)).items[@intFromEnum(i)];
}
};
}
pub fn EntId(comptime D: type) type {
if (@hasDecl(D, "Id")) return D.Id;
return enum(u32) {
_,
pub const Data = D;

View file

@ -1,3 +1,3 @@
main:
li32 $1, 0
li64 $1, 0
tx

View file

@ -12,10 +12,10 @@ main:
ld $31, $254, -24, 24
tx
NameMap(Enum = Nm)(StrBuf = [6]u8, IndexBuf = [4]uint).get:
andi $3, $2, 255
addi64 $3, $3, 1
addi64 $4, $1, 8
andi $2, $2, 255
addi64 $3, $2, 1
muli64 $2, $2, 8
addi64 $4, $1, 8
muli64 $3, $3, 8
add64 $2, $4, $2
add64 $3, $4, $3