fixing around 500 fuzz tests
Signed-off-by: Jakub Doka <jakub.doka2@gmail.com>
parent aec2099fc4
commit b8893d70b2
build.zig (28 lines changed)
@@ -1,23 +1,20 @@
const std = @import("std");

fn wasmAsset(b: *std.Build, optimize: std.builtin.OptimizeMode, comptime name: []const u8, exports: []const []const u8) std.Build.LazyPath {
const exe = b.addExecutable(.{
.name = name,
.root_source_file = b.path("src/depell/" ++ name ++ ".zig"),
.target = b.resolveTargetQuery(.{ .cpu_arch = .wasm32, .os_tag = .freestanding }),
.optimize = optimize,
});

exe.entry = .disabled;
exe.root_module.export_symbol_names = exports;

return exe.getEmittedBin();
}

pub fn build(b: *std.Build) !void {
const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{});

hb: {
_ = b.addModule(.{
.name = "hb",
.root_source_file = b.path("src/root.zig"),
.target = target,
.optimize = optimize,
});

break :hb;
}

hbc: {
const exe = b.addExecutable(.{
.name = "hbc",

@@ -205,9 +202,6 @@ pub fn build(b: *std.Build) !void {
run_afl.addFileArg(afl_lto_out);
run_afl.has_side_effects = true;

_ = run_afl.captureStdErr();
if (i != 0) _ = run_afl.captureStdOut();

run_gen_finding_tests.step.dependOn(&run_afl.step);
}
@@ -171,7 +171,7 @@ pub fn addUnOp(self: *Builder, op: UnOp, ty: DataType, oper: *BuildNode) Specifi
return self.addFlt32Imm(@floatCast(@as(f64, @bitCast(op.eval(oper.data_type, @bitCast(@as(f64, @floatCast(oper.extra(.CFlt32).*))))))));
}
const opa = self.func.addNode(.UnOp, ty, &.{ null, oper }, op);
std.debug.assert(opa.data_type == ty);
opa.data_type = opa.data_type.meet(ty);
return opa;
}

@@ -204,7 +204,7 @@ pub fn pushScopeValue(self: *Builder, value: *BuildNode) void {
self.func.addUse(value, scope);
}

pub inline fn getScopeValue(self: *Builder, index: usize) *Func.Node {
pub fn getScopeValue(self: *Builder, index: usize) *Func.Node {
return self._readScopeValue(scope_value_start + index);
}

@@ -344,8 +344,8 @@ pub const Loop = struct {

pub fn addControl(self: *Loop, builder: *Builder, kind: Loop.Control) void {
if (self.control.getPtr(kind).*) |ctrl| {
_ = mergeScopes(&builder.func, builder.scope.?, ctrl);
builder.control().extra(.Region).preserve_identity_phys = kind == .@"continue";
const rhs = mergeScopes(&builder.func, builder.scope.?, ctrl);
getScopeValues(rhs)[0].extra(.Region).preserve_identity_phys = kind == .@"continue";
} else {
builder._truncateScope(builder.scope.?, self.scope.inputs().len);
self.control.set(kind, builder.scope.?);

@@ -978,10 +978,10 @@ pub fn Func(comptime MachNode: type) type {
}
}

if (@import("builtin").mode == .Debug) {
var iter = self.interner.iterator();
while (iter.next()) |e| std.debug.assert(e.key_ptr.node.id != std.math.maxInt(u16));
}
//if (@import("builtin").mode == .Debug) {
// var iter = self.interner.iterator();
// while (iter.next()) |e| std.debug.assert(e.key_ptr.node.id != std.math.maxInt(u16));
//}

//if (target.outputs().len != 0)
// utils.panic("-- {any}\n", .{target.outputs()})
@@ -112,10 +112,10 @@ pub const Scope = union(enum) {
};
}

pub fn perm(self: Scope) Types.Id {
pub fn perm(self: Scope, types: *Types) Types.Id {
return switch (self) {
.Perm => |p| p,
.Tmp => |t| t.parent_scope.perm(),
.Perm => |p| p.perm(types),
.Tmp => |t| t.parent_scope.perm(types),
};
}

@@ -226,6 +226,7 @@ pub fn getEntry(self: *Codegen, file: Types.File, name: []const u8) !utils.EntId
_ = self.beginBuilder(tmp.arena, .never, 0, 0);
defer self.bl.func.reset();
self.parent_scope = .{ .Perm = self.types.getScope(file) };
self.struct_ret_ptr = null;
self.name = "";

var entry_vl = try self.lookupScopeItem(.init(0), self.types.getScope(file), name);

@@ -254,10 +255,14 @@ pub fn beginBuilder(
}

pub fn build(self: *Codegen, func_id: utils.EntId(root.frontend.types.Func)) !void {
errdefer {
self.types.store.get(func_id).errored = true;
}

var tmp = utils.Arena.scrath(null);
defer tmp.deinit();

const func = self.types.store.get(func_id).*;
var func = self.types.store.get(func_id);

self.ast = self.types.getFile(func.key.file);
const param_count, const return_count, const ret_abi = func.computeAbiSize(self.abi, self.types);

@@ -265,6 +270,15 @@ pub fn build(self: *Codegen, func_id: utils.EntId(root.frontend.types.Func)) !vo
self.parent_scope = .init(.{ .Func = func_id });
self.name = "";

self.types.checkStack(func.key.file, func.key.ast) catch return error.HasErrors;

if (func.recursion_lock) {
self.report(func.key.ast, "the functions types most likely depend on it being evaluated", .{}) catch {};
return error.HasErrors;
}
func.recursion_lock = true;
defer func.recursion_lock = false;

var i: usize = 0;

if (ret_abi == .ByRef) {

@@ -283,6 +297,7 @@ pub fn build(self: *Codegen, func_id: utils.EntId(root.frontend.types.Func)) !vo
for (ast.exprs.view(fn_ast.args)) |aarg| {
const ident = ast.exprs.getTyped(.Ident, aarg.bindings).?;
if (ident.pos.flag.@"comptime") continue;
func = self.types.store.get(func_id);
const ty = func.args[ty_idx];
const abi = self.abiCata(ty);
abi.types(params[i..]);

@@ -307,12 +322,14 @@ pub fn build(self: *Codegen, func_id: utils.EntId(root.frontend.types.Func)) !vo
}

var terminates = false;
func = self.types.store.get(func_id);
_ = self.emit(.{}, ast.exprs.getTyped(.Fn, func.key.ast).?.body) catch |err| switch (err) {
error.Never => {},
error.Unreachable => terminates = true,
};

if (!terminates and ret_abi != .Imaginary) {
func = self.types.store.get(func_id);
self.report(fn_ast.body, "function is missing a return value since" ++
" {} has more then 1 possible value", .{func.ret}) catch {};
}
@@ -654,7 +671,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
};

if (slice.len != e.fields.len()) {
return self.report(expr, "expected array with {} element, got {}", .{ slice.len.?, e.fields.len() });
return self.report(expr, "expected array with {?} element, got {}", .{ slice.len, e.fields.len() });
}

break :b .{ slice.elem, ret_ty };

@@ -958,6 +975,10 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
);
};

if (ftype == .never) {
return self.report(e.field, "accessing malformed field (the type is 'never')", .{});
}

return .mkp(ftype, self.bl.addFieldOffset(base.id.Pointer, @intCast(offset)));
},
.Union => |union_ty| {

@@ -971,6 +992,10 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
);
};

if (ftype == .never) {
return self.report(e.field, "accessing malformed field (the type is 'never')", .{});
}

return .mkp(ftype, self.bl.addFieldOffset(base.id.Pointer, 0));
},
.Slice => |slice_ty| {

@@ -1098,7 +1123,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
error.Unreachable => return err,
};
}
self.emitDefers(defer_scope);
try self.emitDefers(defer_scope);

return .{};
},

@@ -1146,7 +1171,10 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {

const fields = value.ty.data().Enum.getFields(self.types);

if (fields.len == 0) return error.Unreachable;
if (fields.len == 0) {
self.bl.addTrap(0);
return error.Unreachable;
}

if (e.arms.len() == 0) return self.report(e.pos, "the matched type has non zero possible values, " ++
"therefore empty match statement is invalid", .{});

@@ -1236,7 +1264,9 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {

var unreachable_count: usize = 0;
for (ast.exprs.view(e.arms), 0..) |a, i| {
const idx, const body = try matcher.decomposeArm(self, i, a) orelse continue;
const idx, const body = try matcher.decomposeArm(self, i, a) orelse {
continue;
};

const vl = self.bl.addUnOp(.sext, .int, value.getValue(self));
const cond = self.bl.addBinOp(.eq, .i8, vl, self.bl.addIntImm(.int, @bitCast(idx)));
@@ -1318,7 +1348,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
// we don't use .emitTyped because the expression is different
var value = try self.emit(.{ .loc = self.struct_ret_ptr, .ty = self.ret }, e.value);
try self.typeCheck(expr, &value, self.ret);
self.emitDefers(0);
try self.emitDefers(0);
switch (self.abiCata(value.ty)) {
.Imaginary => self.bl.addReturn(&.{}),
.ByValue => {

@@ -1356,7 +1386,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
args.returns[0] = self.abiCata(.type).ByValue;

args.arg_slots[0] = self.bl.addIntImm(.int, @intFromEnum(@field(Comptime.InteruptCode, @tagName(t))));
args.arg_slots[1] = self.emitTyConst(self.parent_scope.perm()).id.Value;
args.arg_slots[1] = self.emitTyConst(self.parent_scope.perm(self.types)).id.Value;
args.arg_slots[2] = self.bl.addIntImm(.int, @intFromEnum(expr));

for (ast.exprs.view(e.captures), 0..) |id, slot_idx| {

@@ -1401,7 +1431,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {

if (e.comptime_args.len() != 0 or has_anytypes) {
const slot, const alloc = self.types.intern(.Template, .{
.scope = self.parent_scope.perm(),
.scope = self.parent_scope.perm(self.types),
.file = self.parent_scope.file(self.types),
.ast = expr,
.name = self.name,

@@ -1414,7 +1444,7 @@ pub fn emit(self: *Codegen, ctx: Ctx, expr: Ast.Id) EmitError!Value {
return self.emitTyConst(slot.key_ptr.*);
} else {
const slot, const alloc = self.types.intern(.Func, .{
.scope = self.parent_scope.perm(),
.scope = self.parent_scope.perm(self.types),
.file = self.parent_scope.file(self.types),
.ast = expr,
.name = self.name,
@@ -1542,8 +1572,9 @@ pub fn lexemeToBinOp(self: *Codegen, pos: anytype, lx: Lexer.Lexeme, ty: Types.I
}

pub fn lexemeToBinOpLow(self: Lexer.Lexeme, ty: Types.Id) ?graph.BinOp {
const unsigned = ty.isUnsigned();
const unsigned = ty.isUnsigned() or ty == .bool or ty.data() == .Pointer or ty.data() == .Enum or ty == .type;
const float = ty.isFloat();
if (!unsigned and !ty.isSigned() and !float) return null;
return switch (self) {
.@"+" => if (float) .fadd else .iadd,
.@"-" => if (float) .fsub else .isub,

@@ -1576,6 +1607,9 @@ fn emitStructFoldOp(self: *Codegen, pos: anytype, ty: utils.EntId(root.frontend.
const value = if (elem.field.ty.data() == .Struct) b: {
break :b try self.emitStructFoldOp(pos, elem.field.ty.data().Struct, op, lhs_loc, rhs_loc) orelse continue;
} else b: {
if (self.abiCata(elem.field.ty) != .ByValue) {
return self.report(pos, "cant apply the operator on field of {}", .{elem.field.ty});
}
const dt = self.abiCata(elem.field.ty).ByValue;
const lhs_val = self.bl.addLoad(lhs_loc, dt);
const rhs_val = self.bl.addLoad(rhs_loc, dt);

@@ -1597,6 +1631,9 @@ fn emitStructOp(self: *Codegen, pos: anytype, ty: utils.EntId(root.frontend.type
if (elem.field.ty.data() == .Struct) {
try self.emitStructOp(pos, elem.field.ty.data().Struct, op, field_loc, lhs_loc, rhs_loc);
} else {
if (self.abiCata(elem.field.ty) != .ByValue) {
return self.report(pos, "cant apply the operator on field of {}", .{elem.field.ty});
}
const dt = self.abiCata(elem.field.ty).ByValue;
const lhs_val = self.bl.addLoad(lhs_loc, dt);
const rhs_val = self.bl.addLoad(rhs_loc, dt);
@@ -1683,6 +1720,14 @@ pub fn resolveGlobal(self: *Codegen, name: []const u8, bsty: Types.Id, ast: *con
try self.types.ct.addInProgress(vari.value, bsty.file(self.types).?);
defer _ = self.types.ct.in_progress.pop().?;

const prev_scope = self.parent_scope;
defer {
self.parent_scope = prev_scope;
self.ast = self.types.getFile(prev_scope.file(self.types));
}
self.parent_scope = .{ .Perm = bsty };
self.ast = self.types.getFile(self.parent_scope.file(self.types));

const ty = if (vari.ty.tag() == .Void) null else try self.resolveAnonTy(vari.ty);

const global_ty, const new = self.types.resolveGlobal(bsty, name, vari.value);

@@ -1730,7 +1775,7 @@ pub fn loadIdent(self: *Codegen, pos: Ast.Pos, id: Ast.Ident) !Value {

break :b switch (self.abiCata(c.ty)) {
.Imaginary => .Imaginary,
.ByValue => |v| .{ .Value = self.bl.addIntImm(v, 0) },
.ByValue => |v| .{ .Value = if (v.isInt()) self.bl.addIntImm(v, 0) else if (v == .f32) self.bl.addFlt32Imm(0) else self.bl.addFlt64Imm(0) },
.ByValuePair, .ByRef => .{ .Pointer = self.bl.addLocal(self.sloc(pos), c.ty.size(self.types)) },
};
} };

@@ -1740,7 +1785,7 @@ pub fn loadIdent(self: *Codegen, pos: Ast.Pos, id: Ast.Ident) !Value {
return self.report(pos, "ICE: parser did not catch this", .{});
};

return self.resolveGlobal(ast.tokenSrc(id.pos()), cursor.perm(), ast, decl, path);
return self.resolveGlobal(ast.tokenSrc(id.pos()), cursor.perm(self.types), ast, decl, path);
}
}

@@ -1781,7 +1826,7 @@ pub fn emitCall(self: *Codegen, ctx: Ctx, expr: Ast.Id, e: Ast.Store.TagPayload(
var computed_args: ?[]Value = null;
const was_template = typ.data() == .Template;
if (was_template) {
computed_args, typ = try self.instantiateTemplate(caller, tmp.arena, expr, e, typ);
computed_args, typ = try self.instantiateTemplate(&caller, tmp.arena, expr, e, typ);
}

if (typ.data() != .Func) {
@@ -1832,7 +1877,7 @@ pub fn emitCall(self: *Codegen, ctx: Ctx, expr: Ast.Id, e: Ast.Store.TagPayload(

pub fn instantiateTemplate(
self: *Codegen,
caller: ?Value,
caller: *?Value,
tmp: *utils.Arena,
expr: Ast.Id,
e: Ast.Store.TagPayload(.Call),

@@ -1842,6 +1887,7 @@ pub fn instantiateTemplate(
const ast = self.ast;

const scope = self.types.store.add(self.types.arena.allocator(), tmpl);
self.types.store.get(scope).temporary = true;
self.types.store.get(scope).key.scope = typ;
self.types.store.get(scope).key.captures = &.{};

@@ -1849,7 +1895,7 @@ pub fn instantiateTemplate(
const tmpl_ast = tmpl_file.exprs.getTyped(.Fn, tmpl.key.ast).?;
const comptime_args = tmpl_file.exprs.view(tmpl_ast.comptime_args);

const passed_args = e.args.len() + @intFromBool(caller != null);
const passed_args = e.args.len() + @intFromBool(caller.* != null);
if (passed_args != tmpl_ast.args.len()) {
return self.report(expr, "expected {} arguments, got {}", .{ tmpl_ast.args.len(), passed_args });
}

@@ -1862,12 +1908,14 @@ pub fn instantiateTemplate(
var arg_idx: usize = 0;
var arg_expr_idx: usize = 0;

if (caller) |c| {
if (caller.*) |*c| {
const param = tmpl_file.exprs.view(tmpl_ast.args)[0];

const binding = tmpl_file.exprs.getTyped(.Ident, param.bindings).?;
if (binding.pos.flag.@"comptime") {
unreachable;
captures[capture_idx] = .{ .id = comptime_args[capture_idx], .ty = .type, .value = @intFromEnum(try self.unwrapTyConst(expr, c)) };
capture_idx += 1;
self.types.store.get(scope).key.captures = captures[0..capture_idx];
} else {
arg_tys[arg_idx] = try self.types.ct.evalTy("", .{ .Perm = .init(.{ .Template = scope }) }, param.ty);
if (arg_tys[arg_idx] == .any) {
@@ -1971,10 +2019,10 @@ fn assembleReturn(cg: *Codegen, expr: anytype, id: u32, call_args: Builder.CallA
};
}

fn emitDefers(self: *Codegen, base: usize) void {
fn emitDefers(self: *Codegen, base: usize) !void {
var iter = std.mem.reverseIterator(self.defers.items[base..]);
while (iter.next()) |e| {
_ = self.emitTyped(.{}, .void, e) catch {};
_ = try self.emitTyped(.{}, .void, e);
}
}

@@ -1985,7 +2033,7 @@ fn loopControl(self: *Codegen, kind: Builder.Loop.Control, ctrl: Ast.Id) !void {
}

const loops = &self.loops.items[self.loops.items.len - 1];
self.emitDefers(loops.defer_base);
try self.emitDefers(loops.defer_base);
switch (loops.kind) {
.Runtime => |*l| l.addControl(&self.bl, kind),
.Comptime => |*l| {

@@ -2081,13 +2129,21 @@ pub fn emitBranch(self: *Codegen, block: Ast.Id) usize {
const prev_scope_height = self.scope.items.len;
defer self.scope.items.len = prev_scope_height;
defer self.bl.truncateScope(prev_scope_height);

const prev_defer_height = self.defers.items.len;
defer self.defers.items.len = prev_defer_height;

_ = self.emitTyped(.{}, .void, block) catch |err|
return @intFromBool(err == error.Unreachable);

self.emitDefers(prev_defer_height) catch |err|
return @intFromBool(err == error.Unreachable);

return 0;
}

fn emitStirng(self: *Codegen, ctx: Ctx, data: []const u8, expr: Ast.Id) Value {
const global = self.types.resolveGlobal(self.parent_scope.perm(), data, expr)[0].data().Global;
const global = self.types.resolveGlobal(self.parent_scope.perm(self.types), data, expr)[0].data().Global;
self.types.store.get(global).data = data;
self.types.store.get(global).ty = self.types.makeSlice(data.len, .u8);
self.queue(.{ .Global = global });

@@ -2357,6 +2413,10 @@ fn emitDirective(self: *Codegen, ctx: Ctx, expr: Ast.Id, e: *const Ast.Store.Tag
},
.is_comptime => return .mkv(.bool, self.bl.addIntImm(.i8, @intFromBool(self.target == .@"comptime"))),
.ecall => {
if (self.target == .@"comptime") {
return self.report(expr, "cant do an ecall during comptime", .{});
}

try static.assertArgs(self, expr, args, "<expr>..");
var tmp = utils.Arena.scrath(null);
defer tmp.deinit();
@@ -113,17 +113,29 @@ pub fn partialEval(self: *Comptime, file: Types.File, pos: anytype, bl: *Builder
.CallEnd => {
const call: *Node = curr.inputs()[0].?;
std.debug.assert(call.kind == .Call);
std.debug.assert(call.extra(.Call).ret_count == 1);

if (call.extra(.Call).ret_count != 1) {
types.report(file, pos, "the function returns something we cant handle", .{});
return .{ .Unsupported = curr };
}

var ret_ty: graph.DataType = .int;
if (call.extra(.Call).id != eca) {
const func_id: utils.EntId(root.frontend.types.Func) = @enumFromInt(call.extra(.Call).id);
const func = types.store.get(func_id);

if (func.recursion_lock) {
types.report(func.key.file, func.key.ast, "the functions types most likely depend on it being evaluated", .{});
return .{ .Unsupported = curr };
}

ret_ty = (abi.categorize(func.ret, types) orelse return .{ .Unsupported = curr }).ByValue;
if (func.completion.get(.@"comptime") == .queued) {
self.jitFunc(func_id) catch return .{ .Unsupported = curr };
std.debug.assert(types.store.get(func_id).completion.get(.@"comptime") == .compiled);
}
if (types.store.get(func_id).errored) return .{ .Unsupported = curr };
std.debug.assert(types.store.get(func_id).completion.get(.@"comptime") == .compiled);
std.debug.assert(self.comptime_code.funcs.items.len > call.extra(.Call).id);
}

var requeued = false;

@@ -139,7 +151,9 @@ pub fn partialEval(self: *Comptime, file: Types.File, pos: anytype, bl: *Builder

if (requeued) continue;

try types.ct.runVm(file, pos, "", call.extra(.Call).id, &.{});
types.ct.runVm(file, pos, "", call.extra(.Call).id, &.{}) catch {
return .{ .Unsupported = curr };
};

const ret = types.ct.vm.regs.get(.ret(0));
const ret_vl = bl.addIntImm(ret_ty, @bitCast(ret));

@@ -196,6 +210,8 @@ pub fn runVm(self: *Comptime, file: Types.File, pos: anytype, name: []const u8,
const stack_end = self.vm.regs.get(.stack_addr);

self.vm.ip = if (entry_id == eca) stack_size - 2 else self.comptime_code.funcs.items[entry_id].offset;
std.debug.assert(self.vm.ip < self.comptime_code.out.items.len);

self.vm.fuel = 1024;
self.vm.regs.set(.ret_addr, stack_size - 1); // return to hardcoded tx
if (return_loc.len != 0) self.vm.regs.set(.arg(0), stack_end - return_loc.len);

@@ -210,7 +226,8 @@ pub fn runVm(self: *Comptime, file: Types.File, pos: anytype, name: []const u8,
};

while (true) switch (self.vm.run(&vm_ctx) catch |err| {
return types.report(file, pos, "comptime execution failed: {s}", .{@errorName(err)});
types.report(file, pos, "comptime execution failed: {s}", .{@errorName(err)});
return error.Never;
}) {
.tx => break,
.eca => {
@@ -27,7 +27,7 @@ deferring: bool = false,
errored: bool = false,
stack_base: usize,

const stack_limit = 1024 * (512 + 256);
pub const stack_limit = 1024 * 1024;

const Parser = @This();
const Error = error{ UnexpectedToken, StackOverflow } || std.mem.Allocator.Error;

@@ -295,7 +295,6 @@ fn checkStack(self: *Parser) !void {
self.report(self.cur.pos, "the tree is too deep", .{});
return error.StackOverflow;
}
//std.debug.print("{}\n", .{distance});
}

fn parseUnitWithoutTail(self: *Parser) Error!Id {

@@ -477,8 +476,14 @@ fn parseUnitWithoutTail(self: *Parser) Error!Id {
.pos = .{ .index = @intCast(token.pos), .flag = .{ .@"comptime" = token.kind != .loop } },
.body = try self.parseScopedExpr(),
} },
.@"break" => .{ .Break = .init(token.pos) },
.@"continue" => .{ .Continue = .init(token.pos) },
.@"break" => b: {
if (self.deferring) self.report(token.pos, "can not break from a defer", .{});
break :b .{ .Break = .init(token.pos) };
},
.@"continue" => b: {
if (self.deferring) self.report(token.pos, "can not continue from a defer", .{});
break :b .{ .Continue = .init(token.pos) };
},
.@"return" => .{ .Return = .{
.pos = .init(token.pos),
.value = b: {

@@ -596,6 +601,9 @@ fn parseArg(self: *Parser) Error!Ast.Arg {
}
_ = self.declareExpr(bindings, false);
_ = try self.expectAdvance(.@":");

const prev = self.comptime_idents.items.len;
defer self.comptime_idents.items.len = prev;
return .{
.bindings = bindings,
.ty = try self.parseExpr(),
@@ -21,6 +21,7 @@ file_scopes: []Id,
ct: Comptime,
diagnostics: std.io.AnyWriter,
files: []const Ast,
stack_base: usize,

const Types = @This();
const Map = std.hash_map.HashMapUnmanaged(Id, void, TypeCtx, 70);

@@ -157,6 +158,15 @@ pub const Id = enum(IdRepr) {
return @enumFromInt(@as(IdRepr, @bitCast(raw_id)));
}

pub fn perm(self: Id, types: *Types) Id {
switch (self.data()) {
.Template => |t| if (types.store.get(t).temporary) return types.store.get(t).key.scope,
else => {},
}

return self;
}

pub fn needsTag(self: Id, types: *Types) bool {
return self.data() == .Nullable and !self.data().Nullable.isCompact(types);
}

@@ -536,6 +546,18 @@ pub const Abi = enum {
}

pub fn categorizeAbleosRecord(stru: anytype, types: *Types) Spec {
if (@TypeOf(stru) == tys.Struct.Id) {
const self = types.store.get(stru);
if (self.recursion_lock) {
types.report(self.key.file, self.key.ast, "the struct has undecidable alignment (cycle)", .{});
return .Imaginary;
}
self.recursion_lock = true;
}
defer if (@TypeOf(stru) == tys.Struct.Id) {
types.store.get(stru).recursion_lock = false;
};

var res: Spec = .Imaginary;
var offset: u64 = 0;
for (stru.getFields(types)) |f| {

@@ -571,6 +593,7 @@ pub fn init(gpa: std.mem.Allocator, source: []const Ast, diagnostics: std.io.Any
const scopes = arena.alloc(Id, source.len);
@memset(scopes, .void);
return .{
.stack_base = @frameAddress(),
.files = source,
.file_scopes = scopes,
.arena = arena,

@@ -579,6 +602,14 @@ pub fn init(gpa: std.mem.Allocator, source: []const Ast, diagnostics: std.io.Any
};
}

pub fn checkStack(self: *Types, file: File, pos: anytype) !void {
const distance = @abs(@as(isize, @bitCast(@frameAddress() -% self.stack_base)));
if (distance > root.frontend.Parser.stack_limit) {
self.report(file, pos, "the tree is too deep", .{});
return error.StackOverflow;
}
}

pub fn deinit(self: *Types) void {
self.arena.deinit();
self.ct.in_progress.deinit(self.ct.comptime_code.gpa);
@@ -222,6 +222,7 @@ pub const Struct = struct {
fields: ?[]const Field = null,
size: ?u64 = null,
alignment: ?u64 = null,
recursion_lock: bool = false,

pub const Field = struct {
name: []const u8,

@@ -235,40 +236,67 @@ pub const Struct = struct {
pub const Data = Struct;

pub fn getSize(id: Id, types: *Types) u64 {
const self = types.store.get(id);
var self = types.store.get(id);

if (self.size) |a| return a;

if (@hasField(Field, "alignment")) @compileError("");
const max_alignment = getAlignment(id, types);

if (self.recursion_lock) {
types.report(self.key.file, self.key.ast, "the struct has infinite size", .{});
return 0;
}
self.recursion_lock = true;
defer self.recursion_lock = false;

if (@hasField(Field, "alignment")) @compileError("");

var siz: u64 = 0;
for (getFields(id, types)) |f| {
for (id.getFields(types)) |f| {
siz = std.mem.alignForward(u64, siz, @min(max_alignment, f.ty.alignment(types)));
siz += f.ty.size(types);
}
siz = std.mem.alignForward(u64, siz, max_alignment);

self = types.store.get(id);
self.size = siz;
return siz;
}

pub fn getAlignment(id: Id, types: *Types) u64 {
const self = types.store.get(id);
var self = types.store.get(id);

if (self.alignment) |a| return a;

if (self.recursion_lock) {
types.report(self.key.file, self.key.ast, "the struct has undecidable alignment (cycle)", .{});
return 1;
}
self.recursion_lock = true;
defer self.recursion_lock = false;

const ast = types.getFile(self.key.file);
const struct_ast = ast.exprs.getTyped(.Struct, self.key.ast).?;

if (struct_ast.alignment.tag() != .Void) {
if (@hasField(Field, "alignment")) @compileError("assert fields <= alignment then base alignment");
self.alignment = @bitCast(types.ct.evalIntConst(.{ .Perm = .init(.{ .Struct = id }) }, struct_ast.alignment) catch 1);
return self.alignment.?;
if (self.alignment == 0 or !std.math.isPowerOfTwo(self.alignment.?)) {
self = types.store.get(id);
types.report(self.key.file, struct_ast.alignment, "the alignment needs to be power of 2, got {}", .{self.alignment.?});
self.alignment = 1;
return 1;
}
return @max(self.alignment.?, 1);
}

var alignm: u64 = 1;
for (getFields(id, types)) |f| {
for (id.getFields(types)) |f| {
alignm = @max(alignm, f.ty.alignment(types));
}

self = types.store.get(id);
self.alignment = alignm;
return alignm;
}

@@ -337,12 +365,15 @@ pub const Struct = struct {

pub const Template = struct {
key: Scope,
temporary: bool = false,
};

pub const Func = struct {
key: Scope,
args: []TyId,
ret: TyId,
errored: bool = false,
recursion_lock: bool = false,
completion: std.EnumArray(Types.Target, CompileState) = .{ .values = .{ .queued, .queued } },

pub const CompileState = enum { queued, compiled };
@@ -266,7 +266,12 @@ inline fn fbinOp(self: *Vm, comptime base: isa.Op, comptime op: isa.Op, ctx: any
.fcmpgt32 => lhs > rhs,
.fc32t64 => @as(f64, @floatCast(lhs)),
.fc64t32 => @as(f32, @floatCast(lhs)),
.fti32 => @as(if (Repr == f32) i32 else i64, @intFromFloat(lhs)),
.fti32 => b: {
const ty = if (Repr == f32) i32 else i64;
if (lhs > std.math.maxInt(ty)) return error.FloatToIntOverflow;
if (lhs < std.math.minInt(ty)) return error.FloatToIntOverflow;
break :b @as(ty, @intFromFloat(lhs));
},
else => |t| @compileError(std.fmt.comptimePrint("unsupported op {any}", .{t})),
};
self.writeReg(args.arg0, switch (@TypeOf(res)) {
@@ -229,7 +229,10 @@ pub fn runVm(
var emulate_ecalls: bool = false;
var ecalls: []const Ast.Id = &.{};

if (ast.findDecl(ast.items, "expectations", undefined)) |d| {
var stack: [stack_size]u8 = undefined;
var tmp = std.heap.FixedBufferAllocator.init(&stack);

if (ast.findDecl(ast.items, "expectations", tmp.allocator())) |d| {
const decl = ast.exprs.getTyped(.Decl, d[0]).?.value;
const ctor = ast.exprs.getTyped(.Ctor, decl).?;
for (ast.exprs.view(ctor.fields)) |field| {

@@ -269,8 +272,6 @@ pub fn runVm(

const stack_end = stack_size - code.len;

var stack: [stack_size]u8 = undefined;

@memcpy(stack[stack_end..], code);

var vm = Vm{};
@@ -50,13 +50,23 @@ pub fn runTest(name: []const u8, code: [:0]const u8) !void {
try test_util.checkOrUpdatePrintTest(name, out.items);
}

pub fn runFuzzFindingTest(name: []const u8, code: []const u8) !void {
pub fn runFuzzFindingTest(name: []const u8, code: [:0]const u8) !void {
utils.Arena.initScratch(1024 * 1024 * 10);
defer utils.Arena.deinitScratch();

const gpa = std.testing.allocator;

std.debug.print("{s}\n", .{code});
var tmp = utils.Arena.scrath(null);
const ast = try root.frontend.Ast.init(tmp.arena, .{
.path = name,
.code = code,
.diagnostics = std.io.getStdErr().writer().any(),
});

var buf = std.ArrayList(u8).init(tmp.arena.allocator());
try ast.fmt(&buf);

std.debug.print("{s}\n", .{buf.items});

//errdefer {
//const stderr = std.io.getStdErr();
@@ -1,4 +1,4 @@
arithmetic 5 (errors):7:13: expected uint got void
arithmetic 5 (errors):7:13: the operator not supported for void
return 1 + v * 10
^
arithmetic 5 (errors):5:20: function is missing a return value since uint has more then 1 possible value