forked from AbleOS/ableos

other stuff

This commit is contained in:
parent 0bd327ed3e
commit 655aabd686
after-ops.txt | 46 (new file)
@@ -0,0 +1,46 @@
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 544
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 3200
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1032
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 224
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 3240
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1144
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1352
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1400
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1128
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1632
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1528
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 2496
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 2440
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 600
+Dbg: deallocating full chunk
+test parser::tests::arithmetic ... Dbg: dropping chunk of size: 544
+Dbg: deallocating full chunk
+test parser::tests::example ... Dbg: dropping chunk of size: 224
+Dbg: deallocating full chunk
befor-ops.txt | 46 (new file)
@@ -0,0 +1,46 @@
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 936
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 4040
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1112
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 296
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 4328
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1464
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1616
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1864
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 1504
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 2160
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 2000
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 3048
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 2960
+Dbg: deallocating full chunk
+Dbg: dropping chunk of size: 0
+Dbg: dropping chunk of size: 848
+Dbg: deallocating full chunk
+test parser::tests::arithmetic ... Dbg: dropping chunk of size: 936
+Dbg: deallocating full chunk
+test parser::tests::example ... Dbg: dropping chunk of size: 296
+Dbg: deallocating full chunk
@@ -1,4 +1,4 @@
-Vec := fn(Elem: type): type {
+Vec := fn($Elem: type): type {
     return struct {
         data: ^Elem,
         len: uint,
@@ -7,7 +7,7 @@ use hbvm::Vm;
 
 use crate::{
     ident::{self, Ident},
-    parser::{idfl, ExprRef},
+    parser::{idfl, ExprRef, FileId, Pos},
     HashMap,
 };
 
@@ -33,7 +33,7 @@ fn align_up(value: u64, align: u64) -> u64 {
 }
 
 struct ItemId {
-    file: parser::FileId,
+    file: FileId,
     expr: parser::ExprRef,
     id: u32,
 }
@@ -299,6 +299,7 @@ type_kind! {
         Pointer,
         Func,
         Global,
+        Module,
     }
 }
 
@@ -508,6 +509,7 @@ impl<'a> std::fmt::Display for TypeDisplay<'a> {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         use TypeKind as TK;
         match TK::from_ty(self.ty) {
+            TK::Module(idx) => write!(f, "module{}", idx),
             TK::Builtin(ty) => write!(f, "{}", bt::to_str(ty)),
             TK::Pointer(ty) => {
                 write!(f, "^{}", self.rety(self.codegen.pointers[ty as usize]))
@@ -561,13 +563,13 @@ struct Linked {
 #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 struct SymKey {
     id: Ident,
-    file: parser::FileId,
+    file: FileId,
 }
 
 #[derive(Default)]
 pub struct Codegen {
     cf: parser::Ast,
-    cf_id: parser::FileId,
+    cf_id: FileId,
 
     ret: Type,
     ret_reg: Option<Reg>,
@@ -634,7 +636,7 @@ impl Codegen {
 
     pub fn generate(&mut self) {
         self.lazy_init();
-        self.find_and_declare(0, Err("main"));
+        self.find_and_declare(0, 0, Err("main"));
         self.code.prelude();
         self.complete_call_graph();
     }
@@ -750,7 +752,7 @@ impl Codegen {
         TypeDisplay::new(self, ty)
     }
 
-    fn unwrap_struct(&self, ty: Type, pos: parser::Pos, context: impl std::fmt::Display) -> Type {
+    fn unwrap_struct(&self, ty: Type, pos: Pos, context: impl std::fmt::Display) -> Type {
         match TypeKind::from_ty(ty) {
             TypeKind::Struct(idx) => idx,
             _ => self.report(
@@ -760,8 +762,7 @@ impl Codegen {
         }
     }
 
-    fn offset_of(&self, pos: parser::Pos, ty: Type, field: Result<&str, usize>) -> (u64, Type) {
-        let idx = self.unwrap_struct(ty, pos, "field access");
+    fn offset_of(&self, pos: Pos, idx: u32, field: Result<&str, usize>) -> (u64, Type) {
         let record = &self.structs[idx as usize];
         let mut offset = 0;
         for (i, &(ref name, ty)) in record.fields.iter().enumerate() {
@@ -897,7 +898,11 @@ impl Codegen {
 
         match value.loc {
             Loc::RegRef(reg) | Loc::Reg(LinReg(reg, ..)) => self.vm.read_reg(reg).0 as _,
-            _ => unreachable!(),
+            Loc::Deref(LinReg(reg, ..), .., off) | Loc::DerefRef(reg, .., off) => {
+                let ptr = unsafe { (self.vm.read_reg(reg).0 as *const u8).add(off as _) };
+                unsafe { std::ptr::read(ptr as *const Type) }
+            }
+            v => unreachable!("{v:?}"),
         }
     }
 
@@ -992,6 +997,7 @@ impl Codegen {
         use instrs as i;
 
         let value = match *expr {
+            E::Mod { id, .. } => Some(Value::ty(TypeKind::Module(id).encode())),
             E::Struct {
                 fields, captured, ..
             } => {
@@ -1010,7 +1016,7 @@ impl Codegen {
                     .map(|&id| E::Ident {
                         id,
                         name: "booodab",
-                        index: u32::MAX,
+                        index: u16::MAX,
                     })
                     .map(|expr| self.expr(&expr))
                     .collect::<Option<Vec<_>>>()?;
@@ -1217,7 +1223,7 @@ impl Codegen {
                 }
 
                 for (i, (name, field)) in fields.iter().enumerate() {
-                    let (offset, ty) = self.offset_of(field.pos(), ty, name.ok_or(i));
+                    let (offset, ty) = self.offset_of(field.pos(), stuct, name.ok_or(i));
                     let loc = loc.offset_ref(offset);
                     self.expr_ctx(field, Ctx::Dest(Value { ty, loc }))?;
                 }
@@ -1225,6 +1231,7 @@ impl Codegen {
                 return Some(Value { ty, loc });
             }
             E::Field { target, field } => {
+                let checkpoint = self.code.code.len();
                 let mut tal = self.expr(target)?;
                 if let TypeKind::Pointer(ty) = TypeKind::from_ty(tal.ty) {
                     tal.ty = self.pointers[ty as usize];
@@ -1237,10 +1244,29 @@ impl Codegen {
                     }
                 };
                 }
-                let (offset, ty) = self.offset_of(target.pos(), tal.ty, Ok(field));
+                match TypeKind::from_ty(tal.ty) {
+                    TypeKind::Struct(idx) => {
+                        let (offset, ty) = self.offset_of(target.pos(), idx, Ok(field));
                 let loc = tal.loc.offset(offset);
                 Some(Value { ty, loc })
                 }
+                    TypeKind::Builtin(bt::TYPE) => {
+                        self.code.code.truncate(checkpoint);
+                        match TypeKind::from_ty(self.ty(target)) {
+                            TypeKind::Module(idx) => Some(Value::ty(
+                                self.find_and_declare(target.pos(), idx, Err(field))
+                                    .encode(),
+                            )),
+                            _ => todo!(),
+                        }
+                    }
+                    smh => self.report(
+                        target.pos(),
+                        format_args!("the field operation is not supported: {smh:?}"),
+                    ),
+                }
+            }
             E::UnOp {
                 op: T::Band,
                 val,
@@ -1370,10 +1396,13 @@ impl Codegen {
             }
             E::Ident { id, .. } => match self
                 .symbols
-                .get(&SymKey { id, file: 0 })
+                .get(&SymKey {
+                    id,
+                    file: self.cf_id,
+                })
                 .copied()
                 .map(TypeKind::from_ty)
-                .unwrap_or_else(|| self.find_and_declare(0, Ok(id)))
+                .unwrap_or_else(|| self.find_and_declare(ident::pos(id), self.cf_id, Ok(id)))
             {
                 TypeKind::Global(id) => self.handle_global(id),
                 tk => Some(Value::ty(tk.encode())),
@@ -1637,7 +1666,7 @@ impl Codegen {
 
         match ctx {
             Ctx::Dest(dest) => {
-                _ = self.assert_ty(expr.pos(), dest.ty, value.ty);
+                _ = self.assert_ty(expr.pos(), value.ty, dest.ty);
                 self.assign(dest.ty, dest.loc, value.loc)?;
                 Some(Value {
                     ty: dest.ty,
@@ -1805,6 +1834,11 @@ impl Codegen {
 
         match size {
             0 => {}
+            ..=8 if let Loc::Imm(imm) = left
+                && let Loc::RegRef(reg) = right =>
+            {
+                self.code.encode(instrs::li64(reg, imm))
+            }
             ..=8 => {
                 let lhs = self.loc_to_reg(left, size);
                 match right {
@@ -1860,9 +1894,21 @@ impl Codegen {
         }
     }
 
-    fn find_and_declare(&mut self, file: parser::FileId, name: Result<Ident, &str>) -> TypeKind {
+    fn find_and_declare(&mut self, pos: Pos, file: FileId, name: Result<Ident, &str>) -> TypeKind {
         let f = self.files[file as usize].clone();
-        let (expr, id) = f.find_decl(name).expect("TODO: error");
+        let Some((expr, id)) = f.find_decl(name) else {
+            self.report(
+                pos,
+                match name {
+                    Ok(_) => format!("undefined indentifier"),
+                    Err("main") => {
+                        format!("compilation root is missing main function: {f}")
+                    }
+                    Err(name) => todo!("somehow we did not handle: {name:?}"),
+                },
+            );
+        };
+
         let sym = match expr {
             E::BinOp {
                 left: &E::Ident { .. },
@@ -2126,7 +2172,7 @@ impl Codegen {
     }
 
     #[must_use]
-    fn assert_ty(&self, pos: parser::Pos, ty: Type, expected: Type) -> Type {
+    fn assert_ty(&self, pos: Pos, ty: Type, expected: Type) -> Type {
         if let Some(res) = bt::try_upcast(ty, expected) {
             res
         } else {
@@ -2136,7 +2182,7 @@ impl Codegen {
         }
     }
 
-    fn report(&self, pos: parser::Pos, msg: impl std::fmt::Display) -> ! {
+    fn report(&self, pos: Pos, msg: impl std::fmt::Display) -> ! {
         let (line, col) = self.cf.nlines.line_col(pos);
         println!("{}:{}:{}: {}", self.cf.path, line, col, msg);
         unreachable!();
@@ -2316,7 +2362,7 @@ impl hbvm::mem::Memory for LoggedMem {
 
 #[cfg(test)]
 mod tests {
-    use crate::codegen::LoggedMem;
+    use crate::{codegen::LoggedMem, log};
 
     use super::parser;
 
@@ -2355,6 +2401,7 @@ mod tests {
         writeln!(output, "code size: {}", out.len()).unwrap();
         writeln!(output, "ret: {:?}", vm.read_reg(1).0).unwrap();
         writeln!(output, "status: {:?}", stat).unwrap();
+        log::inf!("input lenght: {}", input.len());
     }
 
     crate::run_tests! { generate:
@@ -86,6 +86,7 @@ macro_rules! gen_token_kind {
 gen_token_kind! {
     pub enum TokenKind {
         #[patterns]
+        CtIdent,
         Ident,
         Number,
         Eof,
@@ -188,6 +189,12 @@ impl<'a> Lexer<'a> {
         };
         };
 
+        let advance_ident = |s: &mut Self| {
+            while let Some(b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_') = s.peek() {
+                s.advance();
+            }
+        };
+
         let kind = match c {
             b'\n' | b'\r' | b'\t' | b' ' => continue,
             b'0'..=b'9' => {
@@ -196,19 +203,21 @@ impl<'a> Lexer<'a> {
                 }
                 T::Number
             }
-            c @ (b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'@') => {
-                while let Some(b'a'..=b'z' | b'A'..=b'Z' | b'0'..=b'9' | b'_') = self.peek() {
-                    self.advance();
-                }
-
-                if c == b'@' {
+            b'@' => {
                 start += 1;
+                advance_ident(self);
                 T::Driective
-                } else {
+            }
+            b'$' => {
+                start += 1;
+                advance_ident(self);
+                T::CtIdent
+            }
+            b'a'..=b'z' | b'A'..=b'Z' | b'_' => {
+                advance_ident(self);
                 let ident = &self.bytes[start as usize..self.pos as usize];
                 T::from_ident(ident)
             }
-            }
             b'"' => {
                 while let Some(c) = self.advance() {
                     match c {
@@ -1,3 +1,4 @@
+#![feature(vec_pop_if)]
 #![feature(if_let_guard)]
 #![feature(slice_partition_dedup)]
 #![feature(noop_waker)]
@@ -137,7 +138,7 @@ impl<T> TaskQueueInner<T> {
     }
 }
 
-pub fn parse_all(threads: usize) -> io::Result<Vec<Ast>> {
+pub fn parse_all(threads: usize, root: &str) -> io::Result<Vec<Ast>> {
     const GIT_DEPS_DIR: &str = "git-deps";
 
     enum ImportPath<'a> {
@@ -198,20 +199,15 @@ pub fn parse_all(threads: usize) -> io::Result<Vec<Ast>> {
 
 impl<'a> ImportPath<'a> {
     fn resolve(&self, from: &str) -> Result<PathBuf, CantLoadFile> {
-        match self {
-            Self::Root { path } => Ok(Path::new(path).to_owned()),
-            Self::Rel { path } => {
-                let path = PathBuf::from_iter([from, path]);
-                match path.canonicalize() {
-                    Ok(path) => Ok(path),
-                    Err(e) => Err(CantLoadFile(path, e)),
-                }
-            }
+        let path = match self {
+            Self::Root { path } => PathBuf::from(path),
+            Self::Rel { path } => PathBuf::from_iter([from, path]),
             Self::Git { path, link, .. } => {
                 let link = preprocess_git(link);
-                Ok(PathBuf::from_iter([GIT_DEPS_DIR, link, path]))
+                PathBuf::from_iter([GIT_DEPS_DIR, link, path])
             }
-        }
+        };
+        path.canonicalize().map_err(|e| CantLoadFile(path, e))
     }
 }
 
@@ -348,6 +344,7 @@ pub fn parse_all(threads: usize) -> io::Result<Vec<Ast>> {
     };
 
     let execute_task = |(_, path, command): Task, buffer: &mut Vec<u8>| {
+        log::dbg!("{path:?}");
         if let Some(mut command) = command {
             let output = command.output()?;
             if !output.status.success() {
@@ -384,6 +381,10 @@ pub fn parse_all(threads: usize) -> io::Result<Vec<Ast>> {
         }
     };
 
+    let path = Path::new(root).canonicalize()?;
+    seen.lock().unwrap().insert(path.clone(), 0);
+    tasks.push((0, path, None));
+
     std::thread::scope(|s| (0..threads).for_each(|_| _ = s.spawn(thread)));
 
     ast.into_inner()
@@ -1,24 +1,12 @@
-use std::io;
-
-use hblang::{codegen, parser};
-
-fn main() -> io::Result<()> {
-    if std::env::args().len() == 1 {
-        eprintln!("Usage: hblang <file1> <file2> ...");
-        eprintln!(" 1. compiled binary will be printed to stdout");
-        eprintln!(" 2. order of files matters");
-        std::process::exit(1);
-    }
-
-    let files = std::env::args()
-        .skip(1)
-        .map(|path| std::fs::read_to_string(&path).map(|src| (path, src)))
-        .collect::<io::Result<Vec<_>>>()?;
-
-    let mut codegen = codegen::Codegen::default();
-    for (path, content) in files.iter() {
-        codegen.files = vec![parser::Ast::new(&path, &content, &parser::no_loader)];
-        codegen.generate();
-    }
+fn main() -> std::io::Result<()> {
+    let root = std::env::args()
+        .nth(1)
+        .unwrap_or_else(|| "main.hb".to_string());
+
+    let parsed = hblang::parse_all(1, &root)?;
+    let mut codegen = hblang::codegen::Codegen::default();
+    codegen.files = parsed;
+
+    codegen.generate();
     codegen.dump(&mut std::io::stdout())
 }
@@ -10,6 +10,7 @@ use crate::{
     codegen::bt,
     ident::{self, Ident},
     lexer::{Lexer, LineMap, Token, TokenKind},
+    log,
 };
 
 pub type Pos = u32;
@@ -31,11 +32,11 @@ pub mod idfl {
     flags! {
         MUTABLE,
         REFERENCED,
-        CAPTURED,
+        COMPTIME,
     }
 
-    pub fn index(i: IdentFlags) -> u32 {
-        i & !ALL
+    pub fn index(i: IdentFlags) -> u16 {
+        (i & !ALL) as _
     }
 }
 
@@ -49,6 +50,7 @@ pub struct Symbol {
     pub flags: IdentFlags,
 }
 
+#[derive(Clone, Copy)]
 struct ScopeIdent {
     ident: Ident,
     declared: bool,
@@ -61,9 +63,9 @@ pub struct Parser<'a, 'b> {
     lexer: Lexer<'b>,
     arena: &'b Arena<'a>,
     token: Token,
-    idents: Vec<ScopeIdent>,
     symbols: &'b mut Symbols,
     ns_bound: usize,
+    idents: Vec<ScopeIdent>,
     captured: Vec<Ident>,
 }
 
@@ -76,9 +78,9 @@ impl<'a, 'b> Parser<'a, 'b> {
             lexer,
             path: "",
             arena,
-            idents: Vec::new(),
             symbols,
             ns_bound: 0,
+            idents: Vec::new(),
             captured: Vec::new(),
         }
     }
@@ -135,10 +137,11 @@ impl<'a, 'b> Parser<'a, 'b> {
         }
 
         let op = self.next().kind;
+
         let right = self.unit_expr();
         let right = self.bin_expr(right, prec);
-        let right = &*self.arena.alloc(right);
-        let left = &*self.arena.alloc(fold);
+        let right = self.arena.alloc(right);
+        let left = self.arena.alloc(fold);
 
         if let Some(op) = op.assign_op() {
             self.flag_idents(*left, idfl::MUTABLE);
@@ -159,7 +162,8 @@ impl<'a, 'b> Parser<'a, 'b> {
         fold
     }
 
-    fn resolve_ident(&mut self, token: Token, decl: bool) -> (Ident, u32) {
+    fn resolve_ident(&mut self, token: Token, decl: bool) -> (Ident, u16) {
+        let is_ct = self.token.kind == TokenKind::CtIdent;
         let name = self.lexer.slice(token.range());
 
         if let Some(builtin) = bt::from_str(name) {
@@ -191,8 +195,9 @@ impl<'a, 'b> Parser<'a, 'b> {
         };
 
         id.declared |= decl;
-        if self.ns_bound > i && id.declared {
-            id.flags |= idfl::CAPTURED;
+        id.flags |= idfl::COMPTIME * is_ct as u32;
+        if id.declared && self.ns_bound > i {
+            id.flags |= idfl::COMPTIME;
             self.captured.push(id.ident);
         }
 
|
||||||
self.collect_list(T::Comma, T::RBrace, |s| {
|
self.collect_list(T::Comma, T::RBrace, |s| {
|
||||||
let name = s.expect_advance(T::Ident);
|
let name = s.expect_advance(T::Ident);
|
||||||
s.expect_advance(T::Colon);
|
s.expect_advance(T::Colon);
|
||||||
let ty = s.expr();
|
(s.move_str(name), s.expr())
|
||||||
(s.move_str(name), ty)
|
|
||||||
})
|
})
|
||||||
},
|
},
|
||||||
captured: {
|
captured: {
|
||||||
|
@@ -263,7 +267,7 @@ impl<'a, 'b> Parser<'a, 'b> {
                     token.start
                 },
             },
-            T::Ident => {
+            T::Ident | T::CtIdent => {
                 let (id, index) = self.resolve_ident(token, self.token.kind == T::Decl);
                 let name = self.move_str(token);
                 E::Ident { name, id, index }
@@ -289,7 +293,7 @@ impl<'a, 'b> Parser<'a, 'b> {
             args: {
                 self.expect_advance(T::LParen);
                 self.collect_list(T::Comma, T::RParen, |s| {
-                    let name = s.expect_advance(T::Ident);
+                    let name = s.advance_ident();
                     let (id, index) = s.resolve_ident(name, true);
                     s.expect_advance(T::Colon);
                     Arg {
@@ -310,7 +314,12 @@ impl<'a, 'b> Parser<'a, 'b> {
             pos: token.start,
             op: token.kind,
             val: {
-                let expr = self.ptr_unit_expr();
+                let expr = if token.kind == T::Xor {
+                    let expr = self.expr();
+                    self.arena.alloc(expr)
+                } else {
+                    self.ptr_unit_expr()
+                };
                 if token.kind == T::Band {
                     self.flag_idents(*expr, idfl::REFERENCED);
                 }
@@ -384,10 +393,21 @@ impl<'a, 'b> Parser<'a, 'b> {
         expr
     }
 
+    fn advance_ident(&mut self) -> Token {
+        if matches!(self.token.kind, TokenKind::Ident | TokenKind::CtIdent) {
+            self.next()
+        } else {
+            self.report(format_args!(
+                "expected identifier, found {:?}",
+                self.token.kind
+            ))
+        }
+    }
+
     fn pop_scope(&mut self, frame: usize) {
         let mut undeclared_count = frame;
         for i in frame..self.idents.len() {
-            if !self.idents[i].declared {
+            if !&self.idents[i].declared {
                 self.idents.swap(i, undeclared_count);
                 undeclared_count += 1;
             }
@@ -445,8 +465,14 @@ impl<'a, 'b> Parser<'a, 'b> {
         self.next()
     }
 
+    #[track_caller]
     fn report(&self, msg: impl std::fmt::Display) -> ! {
-        let (line, col) = self.lexer.line_col(self.token.start);
+        self.report_pos(self.token.start, msg)
+    }
+
+    #[track_caller]
+    fn report_pos(&self, pos: Pos, msg: impl std::fmt::Display) -> ! {
+        let (line, col) = self.lexer.line_col(pos);
         eprintln!("{}:{}:{} => {}", self.path, line, col, msg);
         unreachable!();
     }
@@ -478,10 +504,51 @@ pub fn find_symbol(symbols: &[Symbol], id: Ident) -> &Symbol {
 pub struct Arg<'a> {
     pub name: &'a str,
     pub id: Ident,
-    pub index: u32,
+    pub index: u16,
     pub ty: Expr<'a>,
 }
 
+macro_rules! generate_expr {
+    ($(#[$meta:meta])* $vis:vis enum $name:ident<$lt:lifetime> {$(
+        $(#[$field_meta:meta])*
+        $variant:ident {
+            $($field:ident: $ty:ty,)*
+        },
+    )*}) => {
+        #[derive(Debug, Clone, Copy, PartialEq, Eq)]
+        $vis enum $name<$lt> {$(
+            $variant {
+                $($field: $ty,)*
+            },
+        )*}
+
+        impl<$lt> $name<$lt> {
+            pub fn pos(&self) -> Pos {
+                #[allow(unused_variables)]
+                match self {
+                    $(Self::$variant { $($field),* } => generate_expr!(@first $(($field),)*).posi(self),)*
+                }
+            }
+
+            pub fn used_bytes(&self) -> usize {
+                match self {$(
+                    Self::$variant { $($field,)* } => {
+                        let fields = [$(($field as *const _ as usize - self as *const _ as usize, std::mem::size_of_val($field)),)*];
+                        let (last, size) = fields.iter().copied().max().unwrap();
+                        last + size
+                    },
+                )*}
+            }
+        }
+    };
+
+    (@first ($($first:tt)*), $($rest:tt)*) => { $($first)* };
+    (@last ($($ign:tt)*), $($rest:tt)*) => { $($rest)* };
+    (@last ($($last:tt)*),) => { $($last)* };
+}
+
+// it would be real nice if we could use relative pointers and still pattern match easily
+generate_expr! {
 #[derive(Debug, Clone, Copy, PartialEq, Eq)]
 pub enum Expr<'a> {
     Break {
@@ -505,9 +572,9 @@ pub enum Expr<'a> {
         val: Option<&'a Self>,
     },
     Ident {
-        name: &'a str,
         id: Ident,
-        index: u32,
+        name: &'a str,
+        index: u16,
     },
     Block {
         pos: Pos,
@@ -566,29 +633,25 @@ pub enum Expr<'a> {
         path: &'a str,
     },
 }
+}
 
-impl<'a> Expr<'a> {
-    pub fn pos(&self) -> Pos {
-        match self {
-            Self::Call { func, .. } => func.pos(),
-            Self::Ident { id, .. } => ident::pos(*id),
-            Self::Break { pos }
-            | Self::Mod { pos, .. }
-            | Self::Directive { pos, .. }
-            | Self::Continue { pos }
-            | Self::Closure { pos, .. }
-            | Self::Block { pos, .. }
-            | Self::Number { pos, .. }
-            | Self::Return { pos, .. }
-            | Self::If { pos, .. }
-            | Self::Loop { pos, .. }
-            | Self::UnOp { pos, .. }
-            | Self::Struct { pos, .. }
-            | Self::Ctor { pos, .. }
-            | Self::Bool { pos, .. } => *pos,
-            Self::BinOp { left, .. } => left.pos(),
-            Self::Field { target, .. } => target.pos(),
-        }
-    }
-}
+trait Poser {
+    fn posi(self, expr: &Expr) -> Pos;
+}
+
+impl Poser for Pos {
+    fn posi(self, expr: &Expr) -> Pos {
+        if matches!(expr, Expr::Ident { .. }) {
+            ident::pos(self)
+        } else {
+            self
+        }
+    }
+}
+
+impl<'a> Poser for &Expr<'a> {
+    fn posi(self, _: &Expr) -> Pos {
+        self.pos()
+    }
+}
 
@@ -817,6 +880,15 @@ impl Ast {
     }
 }
 
+impl std::fmt::Display for Ast {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        for expr in self.exprs() {
+            writeln!(f, "{expr}\n")?;
+        }
+        Ok(())
+    }
+}
+
 impl Default for Ast {
     fn default() -> Self {
         Self(AstInner::new("", "", &no_loader))
@@ -888,21 +960,22 @@ pub struct Arena<'a> {
 impl<'a> Arena<'a> {
     pub fn alloc_str(&self, token: &str) -> &'a str {
         let ptr = self.alloc_slice(token.as_bytes());
-        unsafe { std::str::from_utf8_unchecked_mut(ptr) }
+        unsafe { std::str::from_utf8_unchecked(ptr) }
     }
 
-    pub fn alloc<T>(&self, value: T) -> &'a mut T {
-        if std::mem::size_of::<T>() == 0 {
-            return unsafe { NonNull::dangling().as_mut() };
-        }
-
-        let layout = std::alloc::Layout::new::<T>();
+    pub fn alloc(&self, expr: Expr<'a>) -> &'a Expr<'a> {
+        let align = std::mem::align_of::<Expr<'a>>();
+        let size = expr.used_bytes();
+        let layout = unsafe { std::alloc::Layout::from_size_align_unchecked(size, align) };
         let ptr = self.alloc_low(layout);
-        unsafe { ptr.cast::<T>().write(value) };
-        unsafe { ptr.cast::<T>().as_mut() }
+        unsafe {
+            ptr.cast::<u64>()
+                .copy_from_nonoverlapping(NonNull::from(&expr).cast(), size / 8)
+        };
+        unsafe { ptr.cast::<Expr<'a>>().as_ref() }
     }
 
-    pub fn alloc_slice<T: Copy>(&self, slice: &[T]) -> &'a mut [T] {
+    pub fn alloc_slice<T: Copy>(&self, slice: &[T]) -> &'a [T] {
         if slice.is_empty() || std::mem::size_of::<T>() == 0 {
             return &mut [];
         }
@@ -914,7 +987,7 @@ impl<'a> Arena<'a> {
                 .cast::<T>()
                 .copy_from_nonoverlapping(slice.as_ptr(), slice.len())
         };
-        unsafe { std::slice::from_raw_parts_mut(ptr.as_ptr() as _, slice.len()) }
+        unsafe { std::slice::from_raw_parts(ptr.as_ptr() as _, slice.len()) }
     }
 
     fn alloc_low(&self, layout: std::alloc::Layout) -> NonNull<u8> {
@@ -990,11 +1063,17 @@ impl ArenaChunk {
 
 impl Drop for ArenaChunk {
     fn drop(&mut self) {
+        log::inf!(
+            "dropping chunk of size: {}",
+            (Self::LAYOUT.size() - (self.end as usize - self.base as usize))
+                * !self.end.is_null() as usize
+        );
         let mut current = self.base;
         while !current.is_null() {
             let next = Self::next(current);
             unsafe { std::alloc::dealloc(current, Self::LAYOUT) };
             current = next;
+            log::dbg!("deallocating full chunk");
         }
     }
 }