diff --git a/Cargo.lock b/Cargo.lock
index a026a4d8..95861454 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -35,12 +35,6 @@ version = "0.2.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f"
 
-[[package]]
-name = "anyhow"
-version = "1.0.89"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86fdf8605db99b54d3cd748a44c6d04df638eb5dafb219b135d0149bd0db01f6"
-
 [[package]]
 name = "async-trait"
 version = "0.1.83"
@@ -153,7 +147,6 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 name = "depell"
 version = "0.1.0"
 dependencies = [
- "anyhow",
  "axum",
  "getrandom",
  "htmlm",
diff --git a/depell/Cargo.toml b/depell/Cargo.toml
index dd6d6f8a..f8b2d757 100644
--- a/depell/Cargo.toml
+++ b/depell/Cargo.toml
@@ -4,7 +4,6 @@ version = "0.1.0"
 edition = "2021"
 
 [dependencies]
-anyhow = "1.0.89"
 axum = "0.7.7"
 getrandom = "0.2.15"
 htmlm = "0.3.0"
diff --git a/depell/src/main.rs b/depell/src/main.rs
index b26adef7..51ea77a4 100644
--- a/depell/src/main.rs
+++ b/depell/src/main.rs
@@ -89,8 +89,11 @@ trait Page: Default {
         Html(Self::default().render(&session))
     }
 
-    async fn page(session: Session) -> Html<String> {
-        base(Self::default().render(&session), Some(session)).await
+    async fn page(session: Option<Session>) -> Result<Html<String>, axum::response::Redirect> {
+        match session {
+            Some(session) => Ok(base(Self::default().render(&session), Some(session)).await),
+            None => Err(axum::response::Redirect::permanent("/login")),
+        }
     }
 }
 
@@ -409,7 +412,7 @@ impl<S> axum::extract::FromRequestParts<S> for Session {
         let (name, expiration) = db::with(|db| {
             db.get_session
-                .query_row((dbg!(id),), |r| Ok((r.get::<_, String>(0)?, r.get::<_, u64>(1)?)))
+                .query_row((id,), |r| Ok((r.get::<_, String>(0)?, r.get::<_, u64>(1)?)))
                 .inspect_err(|e| log::error!("{e}"))
                 .map_err(|_| err)
         })?;
diff --git a/depell/wasm-hbfmt/src/lib.rs b/depell/wasm-hbfmt/src/lib.rs
index 0107696e..2b889270 100644
--- a/depell/wasm-hbfmt/src/lib.rs
+++ b/depell/wasm-hbfmt/src/lib.rs
@@ -117,7 +117,8 @@ unsafe extern "C" fn fmt() {
     let code = core::str::from_raw_parts(core::ptr::addr_of!(INPUT).cast(), INPUT_LEN);
 
-    let arena = hblang::parser::Arena::default();
+    let arena =
+        hblang::parser::Arena::with_capacity(code.len() * hblang::parser::SOURCE_TO_AST_FACTOR);
     let mut ctx = ParserCtx::default();
     let exprs =
         hblang::parser::Parser::parse(&mut ctx, code, "source.hb", &|_, _| Ok(0), &arena);
diff --git a/hblang/src/codegen.rs b/hblang/src/codegen.rs
index fa250851..e380358e 100644
--- a/hblang/src/codegen.rs
+++ b/hblang/src/codegen.rs
@@ -635,7 +635,7 @@ struct Pool {
     arg_locs: Vec,
 }
 
-const VM_STACK_SIZE: usize = 1024 * 1024 * 2;
+const VM_STACK_SIZE: usize = 1024 * 64;
 
 struct Comptime {
     vm: hbvm::Vm,
diff --git a/hblang/src/fmt.rs b/hblang/src/fmt.rs
index e29dc142..489637c2 100644
--- a/hblang/src/fmt.rs
+++ b/hblang/src/fmt.rs
@@ -450,6 +450,15 @@ pub mod test {
         minned.truncate(len);
 
         let ast = parser::Ast::new(ident, minned, &mut ParserCtx::default(), &|_, _| Ok(0));
+        log::error!(
+            "{} / {} = {} | {} / {} = {}",
+            ast.mem.size(),
+            input.len(),
+            ast.mem.size() as f32 / input.len() as f32,
+            ast.mem.size(),
+            ast.file.len(),
+            ast.mem.size() as f32 / ast.file.len() as f32
+        );
         let mut output = String::new();
         write!(output, "{ast}").unwrap();
diff --git a/hblang/src/parser.rs b/hblang/src/parser.rs
index e44b8f9b..08807ea4 100644
--- a/hblang/src/parser.rs
+++ b/hblang/src/parser.rs
@@ -6,6 +6,7 @@ use {
     },
     alloc::{boxed::Box, string::String, vec::Vec},
     core::{
+        alloc::Layout,
         cell::UnsafeCell,
         fmt::{self},
         intrinsics::unlikely,
@@ -24,6 +25,8 @@ pub type IdentIndex = u16;
 pub type LoaderError = String;
 pub type Loader<'a> = &'a (dyn Fn(&str, &str) -> Result + 'a);
 
+pub const SOURCE_TO_AST_FACTOR: usize = 7 * (core::mem::size_of::<usize>() / 4) + 1;
+
 pub mod idfl {
     use super::*;
 
@@ -637,6 +640,15 @@ macro_rules! generate_expr {
     )*}
 
     impl<$lt> $name<$lt> {
+        pub fn used_bytes(&self) -> usize {
+            match self {
+                $(Self::$variant { $($field),* } => {
+                    0 $(.max($field as *const _ as usize - self as *const _ as usize
+                        + core::mem::size_of::<$ty>()))*
+                })*
+            }
+        }
+
         pub fn pos(&self) -> Pos {
             #[allow(unused_variables)]
             match self {
@@ -653,7 +665,7 @@ macro_rules! generate_expr {
     (@last ($($last:tt)*),) => { $($last)* };
 }
 
-#[repr(u32)]
+#[repr(u8)]
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum Radix {
     Hex = 16,
@@ -954,7 +966,7 @@ pub struct ParserCtx {
 #[repr(C)]
 pub struct AstInner<T: ?Sized> {
     ref_count: AtomicUsize,
-    mem: ArenaChunk,
+    pub mem: ArenaChunk,
     exprs: *const [Expr<'static>],
 
     pub path: Box<str>,
@@ -971,7 +983,7 @@ impl AstInner<[Symbol]> {
     }
 
     fn new(file: Box<str>, path: &str, ctx: &mut ParserCtx, loader: Loader) -> NonNull<Self> {
-        let arena = Arena::default();
+        let arena = Arena::with_capacity(file.len() * SOURCE_TO_AST_FACTOR);
         let exprs =
             unsafe { core::mem::transmute(Parser::parse(ctx, &file, path, loader, &arena)) };
 
@@ -1070,7 +1082,7 @@ impl ExprRef {
     }
 
     pub fn get<'a>(&self, from: &'a Ast) -> Option<&'a Expr<'a>> {
-        ArenaChunk::contains(from.mem.base, self.0.as_ptr() as _).then_some(())?;
+        from.mem.contains(self.0.as_ptr() as _).then_some(())?;
         // SAFETY: the pointer is or was a valid reference in the past, if it points within one of
         // the arena's regions, it must be valid, since the arena does not give invalid pointers to
         // its allocations
@@ -1197,12 +1209,22 @@ pub struct Arena {
 }
 
 impl Arena {
+    pub fn with_capacity(cap: usize) -> Arena {
+        Self { chunk: UnsafeCell::new(ArenaChunk::new(cap, ArenaChunk::default())) }
+    }
+
     pub fn alloc<'a>(&'a self, expr: Expr<'a>) -> &'a Expr<'a> {
-        let layout = core::alloc::Layout::new::<Expr<'static>>();
+        let layout = core::alloc::Layout::from_size_align(
+            expr.used_bytes(),
+            core::mem::align_of::<Expr<'static>>(),
+        )
+        .unwrap();
         let ptr = self.alloc_low(layout);
         unsafe {
-            ptr.cast::<usize>()
-                .copy_from_nonoverlapping(NonNull::from(&expr).cast(), layout.size() / 8)
+            ptr.cast::<usize>().copy_from_nonoverlapping(
+                NonNull::from(&expr).cast(),
+                layout.size() / core::mem::size_of::<usize>(),
+            )
         };
         unsafe { ptr.cast::<Expr<'a>>().as_ref() }
     }
@@ -1219,9 +1241,6 @@ impl Arena {
     }
 
     fn alloc_low(&self, layout: core::alloc::Layout) -> NonNull<u8> {
-        assert!(layout.align() <= ArenaChunk::ALIGN);
-        assert!(layout.size() <= ArenaChunk::CHUNK_SIZE);
-
         let chunk = unsafe { &mut *self.chunk.get() };
 
         if let Some(ptr) = chunk.alloc(layout) {
@@ -1229,44 +1248,46 @@ impl Arena {
         }
 
         unsafe {
-            core::ptr::write(chunk, ArenaChunk::new(chunk.base));
+            core::ptr::write(
+                chunk,
+                ArenaChunk::new(
+                    1024 * 4 - core::mem::size_of::<ArenaChunk>(),
+                    core::ptr::read(chunk),
+                ),
+            );
         }
 
         chunk.alloc(layout).unwrap()
     }
 }
 
-struct ArenaChunk {
+pub struct ArenaChunk {
     base: *mut u8,
     end: *mut u8,
+    size: usize,
 }
 
 impl Default for ArenaChunk {
     fn default() -> Self {
-        Self { base: core::ptr::null_mut(), end: core::ptr::null_mut() }
+        Self {
+            base: core::mem::size_of::<Self>() as _,
+            end: core::mem::size_of::<Self>() as _,
+            size: 0,
+        }
     }
 }
 
 impl ArenaChunk {
-    const ALIGN: usize = 16;
-    const CHUNK_SIZE: usize = 1 << 16;
-    const LAYOUT: core::alloc::Layout =
-        unsafe { core::alloc::Layout::from_size_align_unchecked(Self::CHUNK_SIZE, Self::ALIGN) };
-    const NEXT_OFFSET: usize = Self::CHUNK_SIZE - core::mem::size_of::<*mut u8>();
-
-    fn new(next: *mut u8) -> Self {
-        let base = unsafe { alloc::alloc::alloc(Self::LAYOUT) };
-        let end = unsafe { base.add(Self::NEXT_OFFSET) };
-        Self::set_next(base, next);
-        Self { base, end }
+    fn layout(size: usize) -> Layout {
+        Layout::new::<Self>().extend(Layout::array::<u8>(size).unwrap()).unwrap().0
     }
 
-    fn set_next(curr: *mut u8, next: *mut u8) {
-        unsafe { core::ptr::write(curr.add(Self::NEXT_OFFSET) as *mut _, next) };
-    }
-
-    fn next(curr: *mut u8) -> *mut u8 {
-        unsafe { core::ptr::read(curr.add(Self::NEXT_OFFSET) as *mut _) }
+    fn new(size: usize, next: Self) -> Self {
+        let mut base = unsafe { alloc::alloc::alloc(Self::layout(size)) };
+        let end = unsafe { base.add(size) };
+        unsafe { core::ptr::write(base.cast(), next) };
+        base = unsafe { base.add(core::mem::size_of::<Self>()) };
+        Self { base, end, size }
     }
 
     fn alloc(&mut self, layout: core::alloc::Layout) -> Option<NonNull<u8>> {
@@ -1279,26 +1300,31 @@ impl ArenaChunk {
         unsafe { Some(NonNull::new_unchecked(self.end)) }
     }
 
-    fn contains(base: *mut u8, arg: *mut u8) -> bool {
-        !base.is_null()
-            && ((unsafe { base.add(Self::CHUNK_SIZE) } > arg && base <= arg)
-                || Self::contains(Self::next(base), arg))
+    fn next(&self) -> Option<&Self> {
+        unsafe { self.base.cast::<Self>().sub(1).as_ref() }
+    }
+
+    fn contains(&self, arg: *mut u8) -> bool {
+        (self.base <= arg && unsafe { self.base.add(self.size) } > arg)
+            || self.next().map_or(false, |s| s.contains(arg))
+    }
+
+    pub fn size(&self) -> usize {
+        self.base as usize + self.size - self.end as usize + self.next().map_or(0, Self::size)
     }
 }
 
 impl Drop for ArenaChunk {
     fn drop(&mut self) {
-        //log::inf!(
-        //    "dropping chunk of size: {}",
-        //    (Self::LAYOUT.size() - (self.end as usize - self.base as usize))
-        //        * !self.end.is_null() as usize
-        //);
-        let mut current = self.base;
-        while !current.is_null() {
-            let next = Self::next(current);
-            unsafe { alloc::alloc::dealloc(current, Self::LAYOUT) };
-            current = next;
-            //log::dbg!("deallocating full chunk");
+        if self.size == 0 {
+            return;
+        }
+        _ = self.next().map(|r| unsafe { core::ptr::read(r) });
+        unsafe {
+            alloc::alloc::dealloc(
+                self.base.sub(core::mem::size_of::<Self>()),
+                Self::layout(self.size),
+            )
         }
     }
 }