lots of work again again

reorganise files again
update spec
prefer ^u8 to ^void
use better-than-c names for memory functions
start work on iterators (spec incomplete)
start work on page allocator
implement arbitrary quicksort
implement into_iter, sort, sort_with, find for Vec(T)
minor improvement to soundness of Type struct
This commit is contained in:
koniifer 2025-01-15 17:41:51 +00:00
parent fd42e53ae5
commit a5e89020c4
18 changed files with 419 additions and 124 deletions

View file

@ -1,6 +1,6 @@
# lily specification
> [!Important] spec version: 0.0.1
> preceding version 1.0.0, the spec is provisional and may be subject to change.
> [!Important] spec version: 0.0.2
> before version 1.0.0, the spec is provisional and may be subject to change.
a collection of guidelines for programmers to use to create lily-compatible implementations.
@ -8,3 +8,4 @@ the following files define the spec:
- [hashers](./spec/hash.md)
- [allocators](./spec/alloc.md)
- [random number generators](./spec/rand.md)
- [iterators](./spec/iter.md)

View file

@ -1,6 +1,6 @@
# allocators
> [!tip]
well designed allocators should ensure they only free or reallocate allocations they made.
well designed allocators should ensure they only deallocate or reallocate allocations they made.
1. all spec compliant allocators should implement:
> unless otherwise stated, functions can be optionally inline.<br>
@ -12,7 +12,7 @@ Allocator := struct {
/// prepare to be deallocated.
deinit := fn(self: ^Self): void
/// should return null on failure.
/// should free any temporary allocations on failure.
/// should dealloc any intermediate allocations on failure.
alloc := fn(self: ^Self, $T: type, count: uint): ?^T
/// same behaviour as alloc, except:
/// must be zeroed.
@ -20,8 +20,8 @@ Allocator := struct {
/// same behaviour as alloc, except:
/// must move data to new allocation,
/// must ensure the old allocation is freed at some point.
realloc := fn(self: ^Self, $T: type, ptr: ^T, count: uint): ?^T
/// must free or schedule the freeing of the given allocation
free := fn(self: ^Self, $T: type, ptr: ^T): void
realloc := fn(self: ^Self, $T: type, ptr: ^T, new_count: uint): ?^T
/// must dealloc or schedule the freeing of the given allocation
dealloc := fn(self: ^Self, $T: type, ptr: ^T): void
}
```

2
docs/spec/iter.md Normal file
View file

@ -0,0 +1,2 @@
# iterators
## spec to-be-defined

View file

@ -1,2 +1,3 @@
.{RawAllocator} := @use("raw_allocator.hb");
.{SimpleAllocator} := @use("simple_allocator.hb")
.{RawAllocator} := @use("raw.hb");
.{PageAllocator} := @use("page.hb");
.{SimpleAllocator} := @use("simple.hb")

157
src/lily/alloc/page.hb Normal file
View file

@ -0,0 +1,157 @@
.{Config, Target, Type, TypeOf, log, collections: .{Vec}, quicksort} := @use("../lib.hb");
.{RawAllocator} := @use("lib.hb")
/*
* intended behaviour: (may not be complete)
* alloc:
* -> if no pages, or all pages full: allocate new pages enough for next allocation.
* -> for first page with contiguous space enough for next allocation, allocate there.
* dealloc:
* -> requires:
* -> ptr must be the first ptr of the allocation
* -> ptr must be in allocation table
* -> remove allocation from allocation table
* -> recalculate contiguous free space in page it was contained in
* -> if page is empty, free page
* realloc:
* -> if new size == original size: do nothing
* -> if new size requires any new page(s) to be allocated, use Target.realloc() to get new pointer
* -> else, Self.dealloc, Self.alloc, memmove
* design todo:
* -> better perf to Self.alloc, memcopy, Self.dealloc?
* -> security would prefer zeroing deallocations before freeing (would require the new order above) (maybe make it a build toggle)
* -> swap Vec(T, RawAllocator) for internal tables (for efficiency)
* assumptions:
* -> pages are of a constant length (per system)
! IMPORTANT
! to ensure referential integrity, we do not move the contents of the blocks to new blocks
! (except when reallocating)
*/
/// A page-granular memory region owned by PageAllocator.
Block := struct {
// the full mapped region backing this block
block: []u8,
// largest known contiguous free span inside `block`
// NOTE(review): maintained lazily by PageAllocator._update_block — confirm
largest_free: []u8,
}
// i cannot pretend this is efficient. (also incomplete)
PageAllocator := struct {
    // table of owned page blocks
    blocks: Vec(Block, RawAllocator),
    // table of live allocations (each recorded as ptr[0..size])
    allocs: Vec([]u8, RawAllocator),
    blocks_raw: RawAllocator,
    allocs_raw: RawAllocator,
    /// Construct an allocator with empty block/allocation tables.
    /// The Vec allocator pointers are re-aimed at the copies stored inside
    /// `self` before returning, so they stay valid after the locals die.
    new := fn(): Self {
        blocks_raw := RawAllocator.new()
        allocs_raw := RawAllocator.new()
        blocks := Vec(Block, RawAllocator).new(&blocks_raw)
        allocs := Vec([]u8, RawAllocator).new(TypeOf(&allocs_raw).uninit())
        self := Self.(
            blocks,
            allocs,
            blocks_raw,
            allocs_raw,
        )
        self.blocks.allocator = &self.blocks_raw
        self.allocs.allocator = &self.allocs_raw
        return self
    }
    /// Release every page block and the internal tables.
    deinit := fn(self: ^Self): void {
        self.allocs.deinit()
        loop if self.blocks.len() == 0 break else {
            // ! (compiler) bug: not logging here causes double free or corruption... wtf...
            log.debug("here")
            block := @unwrap(self.blocks.pop())
            match Target.current() {
                .AbleOS => Target.dealloc(block.block.ptr, block.block.len),
                .LibC => Target.dealloc(block.block.ptr),
            }
        }
        self.blocks.deinit()
        self.blocks_raw.deinit()
        self.allocs_raw.deinit()
    }
    /// First-fit allocation: reuse the first block with enough contiguous
    /// space, otherwise map enough fresh pages for the request.
    /// Should return null on failure (currently unreachable: Target.alloc is unwrapped).
    alloc := fn(self: ^Self, $T: type, count: uint): ?^T {
        This := Type(T)
        size := This.size() * count
        i := 0
        loop if i >= self.blocks.len() break else {
            defer i += 1
            block := @unwrap(self.blocks.get_ref(i))
            if block.largest_free.len >= size {
                ptr := self._update_block(block, size)
                self.allocs.push(ptr[0..size])
                return @bitcast(ptr)
            }
        }
        // no block fits: allocate whole pages sized for this request.
        block_size := Target.calculate_pages(size) * Target.page_size()
        // ! (libc) (compiler) bug: null check broken. unwrapping.
        block_ptr := @unwrap(Target.alloc(block_size))
        block := Block.(block_ptr[0..block_size], block_ptr[size..block_size])
        // ! (libc) (compiler) wtf bug is this? can't push anything to blocks...
        self.blocks.push(block)
        self.allocs.push(block_ptr[0..size])
        log.debug("pushed to allocs")
        log.print(size)
        log.print(block_size)
        // fix: the allocation recorded above starts at block_ptr, so that is
        // the pointer handed out (previously returned block_ptr + size, which
        // pointed past the allocation and disagreed with the table entry,
        // breaking dealloc's "first ptr of the allocation" requirement).
        return @bitcast(block_ptr)
    }
    /// Same as alloc, but the returned memory is zero-filled.
    alloc_zeroed := fn(self: ^Self, $T: type, count: uint): ?^T {
        // fix: delegate to self.alloc (previously called Self.alloc_zeroed,
        // which recursed into itself forever), then zero the result.
        ptr := self.alloc(T, count)
        if ptr == null return null
        // NOTE(review): lib.hb re-exports the platform memset as memset_w —
        // confirm Target.memset resolves here.
        Target.memset(ptr, 0, count * @sizeof(T))
        return ptr
    }
    /// Unimplemented: traps.
    realloc := fn(self: ^Self, $T: type, ptr: ^T, new_count: uint): ?^T {
        log.error("todo: realloc")
        die
        return null
    }
    /// Unimplemented: traps.
    dealloc := fn(self: ^Self, $T: type, ptr: ^T): void {
        log.error("todo: dealloc")
        die
    }
    /// SAFETY: assumes that the block has enough space for `size`
    /// Claims `size` bytes from the block's free region, then rescans the
    /// pointer-sorted allocation table to recompute the block's largest
    /// contiguous free gap. Returns a pointer to the claimed bytes.
    _update_block := fn(self: ^Self, block: ^Block, size: uint): ^u8 {
        block.largest_free = block.largest_free[0..size]
        ptr := block.largest_free.ptr
        self.allocs.sort_with(_compare_ptr)
        log.print(self.allocs.slice)
        // fix: with fewer than two entries the `len() - 1` bound below would
        // underflow (uint) or there is no gap to measure.
        if self.allocs.len() < 2 return ptr
        i := 0
        loop if i == self.allocs.len() - 1 break else {
            defer i += 1
            alloc_a := @unwrap(self.allocs.get_ref(i))
            if alloc_a.ptr < block.block.ptr {
                // fix: the deferred `i += 1` already advances the cursor; the
                // previous extra `i += 1` here skipped every other entry.
                continue
            } else if alloc_a.ptr > block.block.ptr + block.block.len {
                break
            }
            len: uint = 0
            alloc_b := @unwrap(self.allocs.get_ref(i + 1))
            // fix: slices carry `.len`, not `.size`.
            pt2 := alloc_a.ptr + alloc_a.len
            // fix: compare the NEXT allocation against the block's END — the
            // previous `> block.block.ptr` test was true for in-block entries
            // too, so the in-block gap branch was unreachable.
            if alloc_b.ptr > block.block.ptr + block.block.len {
                // next allocation lies outside this block: gap runs to block end
                len = block.block.ptr + block.block.len - pt2
            } else {
                len = alloc_b.ptr - pt2
            }
            if len > block.largest_free.len {
                // fix: the free gap starts at the END of alloc_a (pt2), not at
                // alloc_a itself as `(*alloc_a)[0..len]` implied.
                block.largest_free = pt2[0..len]
            }
            log.debug("here 2")
        }
        return ptr
    }
}
/// Comparator ordering allocation slices by base address (used so the
/// allocation table can be scanned in address order).
$_compare_ptr := fn(lhs: @Any(), rhs: @Any()): bool {
return lhs.ptr < rhs.ptr
}

View file

@ -1,29 +1,30 @@
.{Config, Target, Type, log, collections: .{Vec}} := @use("../lib.hb");
RawAllocator := struct {
ptr: ^void,
ptr: ^u8,
size: uint,
$new := fn(): Self return .(Type(^void).uninit(), 0)
$new := fn(): Self return .(Type(^u8).uninit(), 0)
deinit := fn(self: ^Self): void {
self.free(void, Type(^void).uninit());
self.dealloc(u8, Type(^u8).uninit());
*self = Self.new()
log.debug("deinit: raw allocator")
}
alloc := fn(self: ^Self, $T: type, count: uint): ?^T {
ptr := Target.malloc(count * @sizeof(T))
if ptr != null {
self.ptr = ptr
self.size = count * @sizeof(T)
}
// ! (libc) (compiler) bug: null check broken. unwrapping.
ptr := @unwrap(Target.alloc(count * @sizeof(T)))
self.ptr = ptr
self.size = count * @sizeof(T)
log.debug("allocated raw")
return @bitcast(ptr)
}
alloc_zeroed := fn(self: ^Self, $T: type, count: uint): ?^T {
ptr := Target.calloc(count * @sizeof(T))
if ptr != null {
self.ptr = ptr
self.size = count * @sizeof(T)
}
// ! (libc) (compiler) bug: null check broken. unwrapping.
ptr := @unwrap(Target.alloc_zeroed(count * @sizeof(T)))
self.ptr = ptr
self.size = count * @sizeof(T)
return @bitcast(ptr)
}
realloc := fn(self: ^Self, $T: type, ptr: ^T, count: uint): ?^T {
@ -31,11 +32,12 @@ RawAllocator := struct {
log.debug("reallocated raw")
match Target.current() {
.LibC => {
new_ptr := Target.realloc(self.ptr, count * @sizeof(T))
if new_ptr != null {
self.ptr = new_ptr
self.size = count * @sizeof(T)
}
// ! (libc) (compiler) bug: null check broken. unwrapping.
new_ptr := @unwrap(Target.realloc(self.ptr, count * @sizeof(T)))
// if new_ptr != null {
self.ptr = new_ptr
self.size = count * @sizeof(T)
// }
return @bitcast(new_ptr)
},
.AbleOS => {
@ -49,11 +51,11 @@ RawAllocator := struct {
}
}
// ! INLINING THIS FUNCTION CAUSES MISCOMPILATION!! DO NOT INLINE IT!! :) :) :)
free := fn(self: ^Self, $T: type, ptr: ^T): void {
dealloc := fn(self: ^Self, $T: type, ptr: ^T): void {
if self.size == 0 return;
match Target.current() {
.LibC => Target.free(self.ptr),
.AbleOS => Target.free(self.ptr, self.size),
.LibC => Target.dealloc(self.ptr),
.AbleOS => Target.dealloc(self.ptr, self.size),
}
log.debug("freed raw")
}

View file

@ -2,7 +2,7 @@
.{RawAllocator} := @use("lib.hb")
Allocation := struct {
ptr: ^void,
ptr: ^u8,
len: uint,
}
@ -20,8 +20,8 @@ SimpleAllocator := struct {
alloced := self.allocations.pop()
if alloced == null continue
match Target.current() {
.LibC => Target.free(alloced.ptr),
.AbleOS => Target.free(alloced.ptr, alloced.len),
.LibC => Target.dealloc(alloced.ptr),
.AbleOS => Target.dealloc(alloced.ptr, alloced.len),
}
}
@ -30,7 +30,7 @@ SimpleAllocator := struct {
log.debug("deinit: allocator")
}
alloc := fn(self: ^Self, $T: type, count: uint): ?^T {
ptr := Target.malloc(count * @sizeof(T))
ptr := Target.alloc(count * @sizeof(T))
if Target.current() == .AbleOS {
if ptr != null self.allocations.push(.(ptr, count * @sizeof(T)))
}
@ -39,7 +39,7 @@ SimpleAllocator := struct {
return @bitcast(ptr)
}
alloc_zeroed := fn(self: ^Self, $T: type, count: uint): ?^T {
ptr := Target.calloc(count * @sizeof(T))
ptr := Target.alloc_zeroed(count * @sizeof(T))
if Target.current() == .AbleOS {
if ptr != null self.allocations.push(.(ptr, count * @sizeof(T)))
}
@ -48,9 +48,6 @@ SimpleAllocator := struct {
realloc := fn(self: ^Self, $T: type, ptr: ^T, count: uint): ?^T {
match Target.current() {
.AbleOS => {
// temporary optimisation, ableos only gives whole pages.
// this prevents reallocating 1 page over and over
if count * @sizeof(T) < Target.PAGE_SIZE return ptr
alloced := self._find_and_remove(@bitcast(ptr))
if alloced == null return null
new_ptr := Target.realloc(@bitcast(ptr), alloced.len, count * @sizeof(T))
@ -68,19 +65,19 @@ SimpleAllocator := struct {
},
}
}
free := fn(self: ^Self, $T: type, ptr: ^T): void {
dealloc := fn(self: ^Self, $T: type, ptr: ^T): void {
match Target.current() {
.AbleOS => {
alloced := self._find_and_remove(@bitcast(ptr))
if alloced != null Target.free(@bitcast(ptr), alloced.len)
if alloced != null Target.dealloc(@bitcast(ptr), alloced.len)
},
.LibC => {
Target.free(@bitcast(ptr))
Target.dealloc(@bitcast(ptr))
},
}
log.debug("freed")
}
_find_and_remove := fn(self: ^Self, ptr: ^void): ?Allocation {
_find_and_remove := fn(self: ^Self, ptr: ^u8): ?Allocation {
i := 0
loop if i == self.allocations.len() break else {
defer i += 1

View file

@ -1,4 +1,4 @@
.{memmove, Type, log, alloc} := @use("../lib.hb");
.{memmove, Type, log, alloc, quicksort, compare, iter} := @use("../lib.hb");
Vec := fn($T: type, $Allocator: type): type return struct {
slice: []T,
@ -7,7 +7,7 @@ Vec := fn($T: type, $Allocator: type): type return struct {
$new := fn(allocator: ^Allocator): Self return .{slice: Type([]T).uninit(), allocator, cap: 0}
deinit := fn(self: ^Self): void {
// currently does not handle deinit of T if T allocates memory
if self.cap > 0 self.allocator.free(T, self.slice.ptr)
if self.cap > 0 self.allocator.dealloc(T, self.slice.ptr)
self.slice = Type([]T).uninit()
self.cap = 0
if Allocator == alloc.RawAllocator {
@ -53,6 +53,19 @@ Vec := fn($T: type, $Allocator: type): type return struct {
self.slice.len -= 1
return temp
}
/// Linear search: returns the index of the first element equal to `rhs`,
/// or null if no element matches (including the empty-vec case).
find := fn(self: ^Self, rhs: T): ?uint {
    i := 0
    // fix: test the bound BEFORE indexing — the previous order evaluated
    // self.get(i) at i == len, reading one past the end.
    loop if i == self.slice.len return null else if self.get(i) == rhs return i else i += 1
}
/// Wraps the vec's slice in a lazy element iterator starting at index 0.
/// Takes `self` by value; the iterator references the same backing storage.
$into_iter := fn(self: Self): iter.Iterator(iter.SliceIter(T)) {
return .(.(self.slice, 0))
}
/// Sorts the vec ascending in place using quicksort with the default `<`
/// comparator.
$sort := fn(self: ^Self): void {
    // fix: guard len < 2 — `len - 1` underflows (uint) on an empty vec,
    // and a single element is already sorted.
    if self.slice.len < 2 return;
    _ = quicksort(compare, self.slice, 0, self.slice.len - 1)
}
/// Sorts the vec in place using quicksort ordered by the comparator `func`
/// (a strict "less than"-style predicate).
$sort_with := fn(self: ^Self, $func: type): void {
    // fix: guard len < 2 — `len - 1` underflows (uint) on an empty vec.
    if self.slice.len < 2 return;
    _ = quicksort(func, self.slice, 0, self.slice.len - 1)
}
$len := fn(self: ^Self): uint return self.slice.len
$capacity := fn(self: ^Self): uint return self.capacity
}

68
src/lily/iter.hb Normal file
View file

@ -0,0 +1,68 @@
.{TypeOf} := @use("lib.hb")
/// One step of an iterator: `finished` marks exhaustion; `val` is only
/// meaningful while `finished` is false.
IterNext := fn($T: type): type return struct {finished: bool, val: T}
/// Iterator struct. Implements iterator stuff for you if you implement `into_iter` for your struct.
/// Iterator struct. Implements iterator stuff for you if you implement `into_iter` for your struct.
Iterator := fn($T: type): type {
// the concrete IterNext(..) type produced by the wrapped iterator's `next`
$A := @TypeOf(T.next(idk))
return struct {
inner: T,
/// Advance the wrapped iterator one step.
$next := fn(self: ^Self): A {
return self.inner.next()
}
/// Lazily transform each element with `_map`; no work happens until the
/// resulting iterator is consumed.
$map := fn(self: Self, $_map: type): Iterator(Map(T, _map)) {
return .(.(self))
}
/// Pair each element with a counter (see Enumerate).
$enumerate := fn(self: Self): Iterator(Enumerate(T)) {
return .(.(self, 0))
}
/// Drive the iterator to exhaustion, invoking `_for_each` on every value;
/// the callback's result is discarded.
for_each := fn(self: ^Self, $_for_each: type): void {
loop {
x := self.next()
if x.finished break
_ = _for_each(x.val)
}
}
}
}
/// Map is lazy. Simply calling `my_iter.map(func)` will not cause any execution.
/// Map is lazy. Simply calling `my_iter.map(func)` will not cause any execution.
Map := fn($T: type, $_map: type): type {
// result type of applying `_map` to the inner iterator's element type
$M := @TypeOf(_map(@as(@TypeOf(T.next(idk).val), idk)))
return struct {
iter: Iterator(T),
next := fn(self: ^Self): IterNext(M) {
x := self.iter.inner.next()
// NOTE(review): `_map` is also applied when x.finished is true, i.e. to
// an exhausted iterator's placeholder value — confirm this is intended.
return .(x.finished, _map(x.val))
}
}
}
/// Element yielded by Enumerate: counter `n` paired with the value.
IterEnumerate := fn($T: type): type return struct {n: uint, val: T}
/// Adapter pairing each element of the wrapped iterator with a running
/// index. `n` is seeded with 0 by Iterator.enumerate.
Enumerate := fn($T: type): type {
    // yielded pair type: (uint index, inner element type)
    $A := IterEnumerate(@TypeOf(T.next(idk).val))
    return struct {
        iter: Iterator(T),
        n: uint,
        next := fn(self: ^Self): IterNext(A) {
            x := self.iter.inner.next()
            // fix: capture the current index before incrementing so the
            // first yielded pair carries 0 (previously it carried 1, even
            // though the counter is seeded with 0).
            n := self.n
            self.n += 1
            return .(x.finished, .(n, x.val))
        }
    }
}
/// Iterator over a slice: yields each element in order, then reports
/// exhaustion with a null value.
SliceIter := fn($T: type): type return struct {
    slice: []T,
    cursor: uint,
    next := fn(self: ^Self): IterNext(?T) {
        if self.cursor < self.slice.len {
            val := self.slice[self.cursor]
            self.cursor += 1
            return .(false, val)
        }
        return .(true, null)
    }
}

View file

@ -4,10 +4,10 @@ Version := struct {
patch: uint,
}
$VERSION := Version(0, 0, 4)
$VERSION := Version(0, 0, 5)
Config := struct {
$DEBUG := false
$DEBUG := true
$DEBUG_ASSERTIONS := false
$MIN_LOGLEVEL := log.LogLevel.Info
@ -19,34 +19,6 @@ Config := struct {
}
}
Target := enum {
LibC,
AbleOS,
ableos := @use("targets/ableos.hb")
libc := @use("targets/libc.hb")
$current := fn(): Self {
// This captures all HBVM targets, but for now only AbleOS is supported
if @target("*-virt-unknown") {
return .AbleOS
}
// Assume that unknown targets have libc
return .LibC
}
$Lib := fn(self: Self): type {
match self {
.LibC => return Self.libc,
.AbleOS => return Self.ableos,
}
}
/* ! memmove, memcpy, memset, exit, currently suffixed with `_w` to distinguish them from the wrapper functions */;
/* todo: reorganise these */;
.{malloc, calloc, realloc, free, memmove: memmove_w, memcpy: memcpy_w, memset: memset_w, exit: exit_w, getrandom} := Self.Lib(Self.current());
.{printf_str} := Self.Lib(.LibC);
.{PAGE_SIZE, LogMsg} := Self.Lib(.AbleOS)
}
// ----------------------------------------------------
collections := @use("collections/lib.hb")
@ -56,9 +28,11 @@ alloc := @use("alloc/lib.hb")
hash := @use("hash/lib.hb")
rand := @use("rand/lib.hb")
math := @use("math.hb")
iter := @use("iter.hb")
log := @use("log.hb")
fmt := @use("fmt.hb");
.{Target} := @use("targets/lib.hb");
.{print, printf} := log;
.{Type, TypeOf} := @use("type.hb")
@ -88,4 +62,37 @@ $memmove := fn(dest: @Any(), src: @Any(), size: uint): void {
$memset := fn(dest: @Any(), src: u8, size: uint): void {
if TypeOf(dest).kind() != .Pointer @error("memset requires a pointer")
Target.memset_w(@bitcast(dest), src, size)
}
/// Lomuto partition of array[start..=end] using array[end] as the pivot.
/// After the call, every element for which `func(elem, pivot)` holds sits
/// left of the returned index, and the pivot itself sits at that index.
_qs_partition := fn($func: type, array: @Any(), start: uint, end: uint): uint {
pivot := array[end]
i := start
j := start
loop if j >= end break else {
defer j += 1
if func(array[j], pivot) {
// swap array[i] and array[j], then grow the "less than pivot" prefix
temp := array[i]
array[i] = array[j]
array[j] = temp
i += 1
}
}
// move the pivot into its final sorted position
temp := array[i]
array[i] = array[end]
array[end] = temp
return i
}
/// Recursive quicksort of array[start..=end] ordered by the comparator
/// `func` (a strict "less than"-style predicate). Returns the sorted array.
/// Arrays are passed by value, so pass `&array` to sort an array in place;
/// slices already reference their storage, so pass the slice itself.
/// NOTE(review): callers must ensure start <= end within bounds — with uint
/// indices an empty input underflows at `end = len - 1`; verify call sites.
quicksort := fn($func: type, array: @Any(), start: uint, end: uint): @TypeOf(array) {
if start >= end return array;
pivot_index := _qs_partition(func, array, start, end)
// pivot_index is unsigned: guard the left recursion against underflow at 0
if pivot_index > 0 array = quicksort(func, array, start, pivot_index - 1)
array = quicksort(func, array, pivot_index + 1, end)
return array
}
/// Default ascending comparator used by `sort`/`quicksort`.
$compare := fn(lhs: @Any(), rhs: @Any()): bool {
return lhs < rhs
}

View file

@ -1 +1 @@
.{SimpleRandom} := @use("simple_random.hb")
.{SimpleRandom} := @use("simple.hb")

View file

@ -4,6 +4,7 @@
SimpleRandom := struct {
seed: uint,
$new := fn(): Self return Self.(0)
$default := fn(): Self return Self.(0)
$deinit := fn(self: ^Self): void {
self.seed = 0
}

View file

@ -18,17 +18,17 @@ log: `ecall 3(buf), 1(log), loglevel:u8, string:*const u8, strlen:u64`<br>
> formats and then copies `strlen` of `string` into the serial output
### `lily.Target.AbleOS`:
malloc: `ecall 3(buf), 2(mem), 0(alloc), page_count:u64, zeroed:bool=false`
alloc: `ecall 3(buf), 2(mem), 0(alloc), page_count:u64, zeroed:bool=false`
> returns `Option<*mut u8>` to an available contiguous chunk of memory, sized in `4096` byte (align `8`) pages. it is undefined behaviour to use size zero.
calloc: `ecall 3(buf), 2(mem), 0(alloc), page_count:u64, zeroed:bool=true`<br>
> same as malloc, except filled with zeroes.
alloc_zeroed: `ecall 3(buf), 2(mem), 0(alloc), page_count:u64, zeroed:bool=true`<br>
> same as alloc, except filled with zeroes.
realloc: `ecall 3(buf), 2(mem), 7(realloc), page_count:u64, page_count_new:u64, ptr:*const u8, ptr_new:*const u8`<br>
> resizes an existing contiguous chunk of memory allocated via `malloc`, `calloc`, or `realloc`. contents remain the same. it is undefined behaviour to use size zero or a `null` pointer. returns a new `Option<*mut u8>` after resizing.
> resizes an existing contiguous chunk of memory allocated via `alloc`, `alloc_zeroed`, or `realloc`. contents remain the same. it is undefined behaviour to use size zero or a `null` pointer. returns a new `Option<*mut u8>` after resizing.
free: `ecall 3(buf), 2(mem), 1(free), page_count:u64, ptr:*const u8`<br>
> releases an existing contiguous chunk of memory allocated via `malloc`, `calloc`, or `realloc`. it is undefined behaviour to use size zero or a `null` pointer.
dealloc: `ecall 3(buf), 2(mem), 1(dealloc), page_count:u64, ptr:*const u8`<br>
> releases an existing contiguous chunk of memory allocated via `alloc`, `alloc_zeroed`, or `realloc`. it is undefined behaviour to use size zero or a `null` pointer.
memcpy: `ecall 3(buf), 2(mem), 4(memcopy), size:u64, src:*const u8, dest:*const u8`<br>
> copies `size` of `src` into `dest`. `src` and `dest` must not be overlapping. it is undefined behaviour to use size zero or a `null` pointer.

View file

@ -1,47 +1,49 @@
.{LogLevel} := @use("../lib.hb").log
$PAGE_SIZE := 4096
// page size of the AbleOS virtual memory system (4096-byte pages)
$page_size := fn(): uint {
return 4096
}
LogMsg := packed struct {level: LogLevel, string: ^u8, strlen: uint}
$calculate_pages := fn(size: uint): uint {
return (size + PAGE_SIZE - 1) / PAGE_SIZE
return (size + page_size() - 1) / page_size()
}
AllocMsg := packed struct {a: u8, count: uint, zeroed: bool}
$malloc := fn(size: uint): ?^void {
$alloc := fn(size: uint): ?^u8 {
return @eca(3, 2, &AllocMsg.(0, calculate_pages(size), false), @sizeof(AllocMsg))
}
$calloc := fn(size: uint): ?^void {
$alloc_zeroed := fn(size: uint): ?^u8 {
return @eca(3, 2, &AllocMsg.(0, calculate_pages(size), true), @sizeof(AllocMsg))
}
ReallocMsg := packed struct {a: u8, count: uint, count_new: uint, ptr: ^void}
$realloc := fn(ptr: ^void, size: uint, size_new: uint): ?^void {
ReallocMsg := packed struct {a: u8, count: uint, count_new: uint, ptr: ^u8}
$realloc := fn(ptr: ^u8, size: uint, size_new: uint): ?^u8 {
return @eca(3, 2, &ReallocMsg.(7, calculate_pages(size), calculate_pages(size_new), ptr), @sizeof(ReallocMsg))
}
FreeMsg := packed struct {a: u8, count: uint, ptr: ^void}
$free := fn(ptr: ^void, size: uint): void {
FreeMsg := packed struct {a: u8, count: uint, ptr: ^u8}
$dealloc := fn(ptr: ^u8, size: uint): void {
return @eca(3, 2, &FreeMsg.(1, calculate_pages(size), ptr), @sizeof(FreeMsg))
}
CopyMsg := packed struct {a: u8, count: uint, src: ^void, dest: ^void}
$memcpy := fn(dest: ^void, src: ^void, size: uint): void {
CopyMsg := packed struct {a: u8, count: uint, src: ^u8, dest: ^u8}
$memcpy := fn(dest: ^u8, src: ^u8, size: uint): void {
return @eca(3, 2, &CopyMsg.(4, size, src, dest), @sizeof(CopyMsg))
}
SetMsg := packed struct {a: u8, count: uint, size: uint, src: ^void, dest: ^void}
$memset := fn(dest: ^void, src: u8, size: uint): void {
SetMsg := packed struct {a: u8, count: uint, size: uint, src: ^u8, dest: ^u8}
$memset := fn(dest: ^u8, src: u8, size: uint): void {
return @eca(3, 2, &SetMsg.(5, size, 1, @bitcast(&src), dest), @sizeof(SetMsg))
}
$memmove := fn(dest: ^void, src: ^void, size: uint): void {
$memmove := fn(dest: ^u8, src: ^u8, size: uint): void {
return @eca(3, 2, &CopyMsg.(6, size, src, dest), @sizeof(CopyMsg))
}
$getrandom := fn(dest: ^void, size: uint): void return @eca(3, 4, dest, size)
$getrandom := fn(dest: ^u8, size: uint): void return @eca(3, 4, dest, size)
$exit := fn(code: int): void {
}

27
src/lily/targets/lib.hb Normal file
View file

@ -0,0 +1,27 @@
/// Compile-time target selection: picks which platform module (ableos or
/// libc) supplies the memory/IO primitives re-exported at the bottom.
Target := enum {
LibC,
AbleOS,
ableos := @use("ableos.hb")
libc := @use("libc.hb")
/// Resolve the build target at compile time.
$current := fn(): Self {
// This captures all HBVM targets, but for now only AbleOS is supported
if @target("*-virt-unknown") {
return .AbleOS
}
// Assume that unknown targets have libc
return .LibC
}
/// Returns the module implementing the given target's primitives.
$Lib := fn(self: Self): type {
match self {
.AbleOS => return Self.ableos,
.LibC => return Self.libc,
}
}
/* ! memmove, memcpy, memset, exit, currently suffixed with `_w` to distinguish them from the wrapper functions */;
/* todo: reorganise these */;
.{alloc, alloc_zeroed, realloc, dealloc, memmove: memmove_w, memcpy: memcpy_w, memset: memset_w, exit: exit_w, getrandom, page_size, calculate_pages} := Self.Lib(Self.current());
.{printf_str} := Self.Lib(.LibC);
.{LogMsg} := Self.Lib(.AbleOS)
}

View file

@ -1,10 +1,19 @@
malloc := fn(size: uint): ?^void @import()
calloc := fn(size: uint): ?^void @import()
realloc := fn(ptr: ^void, size: uint): ?^void @import()
free := fn(ptr: ^void): void @import()
memmove := fn(dest: ^void, src: ^void, size: uint): void @import()
memcpy := fn(dest: ^void, src: ^void, size: uint): void @import()
memset := fn(dest: ^void, src: u8, size: uint): void @import()
alloc := fn(size: uint): ?^u8 @import("malloc")
alloc_zeroed := fn(size: uint): ?^u8 @import("calloc")
realloc := fn(ptr: ^u8, size: uint): ?^u8 @import()
dealloc := fn(ptr: ^u8): void @import("free")
memmove := fn(dest: ^u8, src: ^u8, size: uint): void @import()
memcpy := fn(dest: ^u8, src: ^u8, size: uint): void @import()
memset := fn(dest: ^u8, src: u8, size: uint): void @import()
exit := fn(code: int): void @import()
printf_str := fn(str0: ^u8, strlen: uint, str1: ^u8): void @import("printf")
getrandom := fn(dest: ^void, size: uint): void @import()
getrandom := fn(dest: ^u8, size: uint): void @import()
// temp: libc targets don't expose a page size; assume the common 4096 bytes
$page_size := fn(): uint {
return 4096
}
// also temp: number of pages needed to hold `size` bytes (ceiling division)
$calculate_pages := fn(size: uint): uint {
return (size + page_size() - 1) / page_size()
}

View file

@ -91,7 +91,7 @@ Type := fn($T: type): type return struct {
$uninit := fn(): T {
match Self.kind() {
.Pointer => return @bitcast(0),
.Slice => return @bitcast(@as(^void, @bitcast(0))[0..0]),
.Slice => return Type(^Self.Child().This()).uninit()[0..0],
.Array => return idk,
.Builtin => return idk,
.Struct => return idk,

View file

@ -9,31 +9,39 @@ Hasher := lily.hash.FoldHasher
// ! HashMap only works on AbleOS target (due to compiler bugs)
main := fn(argc: uint, argv: []^void): uint {
$some_sorter := fn(lhs: @Any(), rhs: @Any()): bool {
return lhs < rhs
}
$add_one := fn(x: ?uint): ?uint {
return @unwrap(x) + 1
}
$print := fn(next: @Any()): void {
lily.print(@as(@ChildOf(@TypeOf(next)), @unwrap(next)))
}
main := fn(): uint {
allocator := Allocator.new()
defer allocator.deinit()
map := HashMap(uint, uint, Hasher, Allocator).new(&allocator)
defer map.deinit()
vec := Vec(uint, Allocator).new(&allocator)
defer vec.deinit()
rand := Random.default()
defer rand.deinit()
_ = map.insert(10, 20)
ptr := map.insert(10, 30)
good := 0
if ptr == @unwrap(map.get_ref(10)) {
lily.log.info("good")
good = *ptr
lily.print(good)
} else {
lily.log.error("bad")
i := 0
loop if i == 100 break else {
defer i += 1
vec.push(rand.any(u8))
}
// note: this does not affect the values of the vec itself
// the `add_one` here simply changes the value before printing.
// ! (libc) (compiler) bug: prints same numbers several times on libc. does not occur on ableos.
vec.into_iter().map(add_one).for_each(print)
other := map.remove(10)
if @unwrap(other) == good {
lily.log.info("good 2")
} else {
lily.log.error("bad 2")
}
// equivalent to vec.sort() when some_sorter == `lhs < rhs`
// uses lily.quicksort under the hood
vec.sort_with(some_sorter)
return 0
}