making a little utility for computing struct layouts

Jakub Doka 2024-09-28 21:56:39 +02:00
parent c3f9e535d3
commit a51b23187d
No known key found for this signature in database
GPG key ID: C6E9A89936B8C143
15 changed files with 779 additions and 727 deletions
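The centerpiece of the commit is the new OffsetIter in hblang/src/lib.rs (see the hunk further below): it walks a struct's fields, rounding the running offset up to each field's alignment (or to the explicit alignment of a packed struct) and advancing by the field's size, so offset_of, size_of and the codegen paths share a single layout computation. A minimal standalone sketch of that walk, using hypothetical (size, align) pairs instead of the compiler's Types table:

// Standalone sketch of the layout walk performed by OffsetIter.
// `fields` holds hypothetical (size, align) pairs; the real code looks
// these values up through Types::size_of / Types::align_of.
fn align_up(value: u32, align: u32) -> u32 {
    // e.g. align_up(5, 4) == 8; `align` must be a power of two
    (value + align - 1) & !(align - 1)
}

// Returns the byte offset of every field plus the total size.
// `explicit_alignment` models packed structs (Some(1) means no padding).
fn layout(fields: &[(u32, u32)], explicit_alignment: Option<u32>) -> (Vec<u32>, u32) {
    let mut offset = 0;
    let mut offsets = Vec::with_capacity(fields.len());
    for &(size, align) in fields {
        offset = align_up(offset, explicit_alignment.unwrap_or(align));
        offsets.push(offset);
        offset += size;
    }
    (offsets, offset)
}

fn main() {
    // struct { a: u8, b: u16 }: one padding byte before `b`
    assert_eq!(layout(&[(1, 1), (2, 2)], None), (vec![0, 2], 4));
    // packed struct { a: u8, b: u16 }: no padding at all
    assert_eq!(layout(&[(1, 1), (2, 2)], Some(1)), (vec![0, 1], 3));
}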


@ -85,11 +85,7 @@ fn gen_instrs(generated: &mut String) -> Result<(), Box<dyn std::error::Error>>
}
'_name_list: {
writeln!(generated, "pub const NAMES: [&str; {}] = [", instructions().count())?;
for [_, name, _, _] in instructions() {
writeln!(generated, " \"{}\",", name.to_lowercase())?;
}
writeln!(generated, "];")?;
writeln!(generated, "pub const COUNT: u8 = {};", instructions().count())?;
}
let instr = "Instr";


@ -34,7 +34,7 @@ impl TryFrom<u8> for Instr {
Err(value)
}
if value < NAMES.len() as u8 {
if value < COUNT {
unsafe { Ok(core::mem::transmute::<u8, Instr>(value)) }
} else {
failed(value)
@ -99,10 +99,7 @@ pub fn disasm(
};
fn instr_from_byte(b: u8) -> std::io::Result<Instr> {
if b as usize >= instrs::NAMES.len() {
return Err(std::io::ErrorKind::InvalidData.into());
}
Ok(unsafe { std::mem::transmute::<u8, Instr>(b) })
b.try_into().map_err(|_| std::io::ErrorKind::InvalidData.into())
}
let mut labels = HashMap::<u32, u32>::default();
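The two hunks above are the same pattern seen from both sides: the bytecode crate now emits a COUNT constant next to NAMES, and TryFrom<u8> for Instr bounds-checks against it before transmuting, so the disassembler's instr_from_byte can simply delegate to try_into. A hedged sketch of that pattern with a hypothetical three-variant enum (the real Instr and COUNT are generated code):

// Hypothetical stand-in for the generated Instr enum and COUNT constant.
#[derive(Clone, Copy, Debug)]
#[repr(u8)]
enum Instr { Un = 0, Tx = 1, Nop = 2 }

const COUNT: u8 = 3;

impl TryFrom<u8> for Instr {
    type Error = u8;

    fn try_from(value: u8) -> Result<Self, Self::Error> {
        if value < COUNT {
            // Sound only because the enum is #[repr(u8)], its discriminants are
            // contiguous from 0, and COUNT matches the number of variants.
            Ok(unsafe { core::mem::transmute::<u8, Instr>(value) })
        } else {
            Err(value)
        }
    }
}

fn instr_from_byte(b: u8) -> std::io::Result<Instr> {
    b.try_into().map_err(|_| std::io::ErrorKind::InvalidData.into())
}

fn main() {
    assert!(matches!(instr_from_byte(2), Ok(Instr::Nop)));
    assert!(instr_from_byte(COUNT).is_err());
}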


@ -160,12 +160,15 @@ Ty2 := struct {
c: int,
}
useless := struct {}
main := fn(): int {
// `packed` structs have no padding (all fields are aligned to 1)
if @sizeof(packed struct {a: u8, b: u16}) != 3 {
return 9001
}
finst := Ty2.{ty: Ty.{a: 4, b: 1}, c: 3}
finst := Ty2.{ty: .{a: 4, b: 1}, c: 3}
inst := odher_pass(finst)
if inst.c == 3 {
return pass(&inst.ty)
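The new check in the test above relies on packed dropping all inter-field padding, so a u8 followed by a u16 occupies 1 + 2 = 3 bytes instead of the naturally aligned 4 (one padding byte before the u16). The same arithmetic sketched in Rust for comparison:

// Rust analogue of the hblang assertion above: repr(packed) aligns every
// field to 1, so no padding is inserted between `a` and `b`.
#[repr(C)]
struct Natural { a: u8, b: u16 }   // 1 + 1 (padding) + 2 = 4 bytes

#[repr(C, packed)]
struct Packed { a: u8, b: u16 }    // 1 + 2 = 3 bytes

fn main() {
    assert_eq!(std::mem::size_of::<Natural>(), 4);
    assert_eq!(std::mem::size_of::<Packed>(), 3);
}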


@ -1,5 +1,4 @@
--fmt - format all source files
--fmt-current - format mentioned file
--fmt-stdout - dont write the formatted file but print it
--fmt-stdout - dont write the formatted file but print it
--dump-asm - output assembly instead of raw code, (the assembly is more for debugging the compiler)
--threads <1...> - number of threads compiler can use [default: 1]


@ -8,8 +8,8 @@ use {
parser::{
self, find_symbol, idfl, CommentOr, CtorField, Expr, ExprRef, FileId, Pos, StructField,
},
ty, Field, Func, Global, LoggedMem, ParamAlloc, Reloc, Sig, Struct, SymKey, TypedReloc,
Types,
ty, Field, Func, Global, LoggedMem, OffsetIter, ParamAlloc, Reloc, Sig, Struct, SymKey,
TypedReloc, Types,
},
core::panic,
std::fmt::Display,
@ -483,13 +483,6 @@ impl ItemCtx {
}
fn emit(&mut self, (len, instr): (usize, [u8; instrs::MAX_SIZE])) {
let name = instrs::NAMES[instr[0] as usize];
log::trc!(
"{:08x}: {}: {}",
self.code.len(),
name,
instr.iter().take(len).skip(1).map(|b| format!("{:02x}", b)).collect::<String>()
);
self.code.extend_from_slice(&instr[..len]);
}
@ -729,7 +722,7 @@ impl Codegen {
.filter_map(CommentOr::or)
.map(|sf| Field { name: sf.name.into(), ty: self.ty(&sf.ty) })
.collect();
self.tys.structs.push(Struct { fields, explicit_alignment });
self.tys.structs.push(Struct { name: 0, file: 0, fields, explicit_alignment });
self.tys.structs.len() as u32 - 1
}
@ -1122,15 +1115,13 @@ impl Codegen {
match ty.expand() {
ty::Kind::Struct(stru) => {
let mut offset = 0;
let sfields = self.tys.structs[stru as usize].fields.clone();
for (sfield, field) in sfields.iter().zip(fields) {
let mut oiter = OffsetIter::new(stru);
for field in fields {
let (ty, offset) = oiter.next_ty(&self.tys).unwrap();
let loc = loc.as_ref().offset(offset);
let ctx = Ctx::default().with_loc(loc).with_ty(sfield.ty);
let ctx = Ctx::default().with_loc(loc).with_ty(ty);
let value = self.expr_ctx(field, ctx)?;
self.ci.free_loc(value.loc);
offset += self.tys.size_of(sfield.ty);
offset = Types::align_up(offset, self.tys.align_of(sfield.ty));
}
}
ty::Kind::Slice(arr) => {
@ -1944,21 +1935,14 @@ impl Codegen {
.loc
.or_else(|| right.take_owned())
.unwrap_or_else(|| Loc::stack(self.ci.stack.allocate(self.tys.size_of(ty))));
let mut offset = 0;
for &Field { ty, .. } in self.tys.structs[stuct as usize].fields.clone().iter() {
offset = Types::align_up(
offset,
self.tys.structs[stuct as usize]
.explicit_alignment
.unwrap_or(self.tys.align_of(ty)),
);
let size = self.tys.size_of(ty);
let mut oiter = OffsetIter::new(stuct);
while let Some((ty, offset)) = oiter.next_ty(&self.tys) {
let ctx = Ctx::from(Value { ty, loc: loc.as_ref().offset(offset) });
let left = left.as_ref().offset(offset);
let right = right.as_ref().offset(offset);
let value = self.struct_op(op, ty, ctx, left, right)?;
self.ci.free_loc(value.loc);
offset += size;
}
self.ci.free_loc(left);
@ -2317,9 +2301,10 @@ impl Codegen {
Reloc::pack_srel(stack, off)
}
// TODO: sometimes it's better to do this in bulk
fn ty(&mut self, expr: &Expr) -> ty::Id {
ty::Id::from(self.eval_const(expr, ty::TYPE))
self.tys
.ty(self.ci.file, expr, &self.files)
.unwrap_or_else(|| ty::Id::from(self.eval_const(expr, ty::TYPE)))
}
fn read_trap(addr: u64) -> Option<&'static trap::Trap> {


@ -272,6 +272,7 @@ impl TokenKind {
Self::Div => a.wrapping_div(b),
Self::Shl => a.wrapping_shl(b as _),
Self::Eq => (a == b) as i64,
Self::Ne => (a != b) as i64,
Self::Band => a & b,
s => todo!("{s}"),
}


@ -19,12 +19,13 @@
extract_if,
ptr_internals
)]
#![allow(stable_features, internal_features, clippy::format_collect)]
#![allow(stable_features, internal_features)]
use {
self::{
ident::Ident,
parser::{Expr, ExprRef, FileId},
lexer::TokenKind,
parser::{CommentOr, Expr, ExprRef, FileId},
son::reg,
ty::ArrayLen,
},
@ -32,6 +33,7 @@ use {
parser::Ast,
std::{
collections::{hash_map, BTreeMap, VecDeque},
fmt::Display,
io,
ops::Range,
path::{Path, PathBuf},
@ -58,6 +60,7 @@ pub mod parser;
pub mod son;
mod lexer;
mod vc;
mod task {
use super::Offset;
@ -164,8 +167,9 @@ mod log {
mod ty {
use {
crate::{
ident,
lexer::TokenKind,
parser::{self, Expr},
parser::{self},
},
std::{num::NonZeroU32, ops::Range},
};
@ -427,34 +431,22 @@ mod ty {
TK::Ptr(ty) => {
write!(f, "^{}", self.rety(self.tys.ptrs[ty as usize].base))
}
_ if let Some((key, _)) = self
.tys
.syms
.iter()
.find(|(sym, &ty)| sym.file < self.files.len() as u32 && ty == self.ty)
&& let Some(name) = self.files[key.file as usize].exprs().iter().find_map(
|expr| match expr {
Expr::BinOp {
left: &Expr::Ident { name, id, .. },
op: TokenKind::Decl,
..
} if id == key.ident => Some(name),
_ => None,
},
) =>
{
write!(f, "{name}")
}
TK::Struct(idx) => {
let record = &self.tys.structs[idx as usize];
if ident::is_null(record.name) {
write!(f, "[{idx}]{{")?;
for (i, &super::Field { ref name, ty }) in record.fields.iter().enumerate() {
for (i, &super::Field { ref name, ty }) in record.fields.iter().enumerate()
{
if i != 0 {
write!(f, ", ")?;
}
write!(f, "{name}: {}", self.rety(ty))?;
}
write!(f, "}}")
} else {
let file = &self.files[record.file as usize];
write!(f, "{}", file.ident_str(record.name))
}
}
TK::Func(idx) => write!(f, "fn{idx}"),
TK::Global(idx) => write!(f, "global{idx}"),
@ -585,6 +577,8 @@ struct Field {
}
struct Struct {
name: Ident,
file: FileId,
explicit_alignment: Option<u32>,
fields: Rc<[Field]>,
}
@ -639,6 +633,53 @@ struct Types {
const HEADER_SIZE: usize = std::mem::size_of::<AbleOsExecutableHeader>();
impl Types {
/// returns none if comptime eval is required
fn ty(&mut self, file: FileId, expr: &Expr, files: &[parser::Ast]) -> Option<ty::Id> {
Some(match *expr {
Expr::UnOp { op: TokenKind::Xor, val, .. } => {
let base = self.ty(file, val, files)?;
self.make_ptr(base)
}
Expr::Ident { id, .. } if ident::is_null(id) => id.into(),
Expr::Ident { id, .. } => {
let f = &files[file as usize];
let (Expr::BinOp { right, .. }, name) = f.find_decl(Ok(id))? else {
unreachable!()
};
let ty = self.ty(file, right, files)?;
if let ty::Kind::Struct(s) = ty.expand() {
self.structs[s as usize].name = name;
}
ty
}
Expr::Struct { pos, fields, packed, .. } => {
let sym = SymKey { file, ident: pos };
if let Some(&ty) = self.syms.get(&sym) {
return Some(ty);
}
let fields = fields
.iter()
.filter_map(CommentOr::or)
.map(|sf| {
Some(Field { name: sf.name.into(), ty: self.ty(file, &sf.ty, files)? })
})
.collect::<Option<_>>()?;
self.structs.push(Struct {
name: 0,
file,
fields,
explicit_alignment: packed.then_some(1),
});
let ty = ty::Kind::Struct(self.structs.len() as u32 - 1).compress();
self.syms.insert(sym, ty);
ty
}
_ => return None,
})
}
fn assemble(&mut self, to: &mut Vec<u8>) {
to.extend([0u8; HEADER_SIZE]);
@ -762,14 +803,10 @@ impl Types {
}
fn offset_of(&self, idx: ty::Struct, field: &str) -> Option<(Offset, ty::Id)> {
let record = &self.structs[idx as usize];
let until = record.fields.iter().position(|f| f.name.as_ref() == field)?;
let mut offset = 0;
for &Field { ty, .. } in &record.fields[..until] {
offset = Self::align_up(offset, record.explicit_alignment.unwrap_or(self.align_of(ty)));
offset += self.size_of(ty);
}
Some((offset, record.fields[until].ty))
OffsetIter::new(idx)
.into_iter(self)
.find(|(f, _)| f.name.as_ref() == field)
.map(|(f, off)| (off, f.ty))
}
fn make_ptr(&mut self, base: ty::Id) -> ty::Id {
@ -812,10 +849,6 @@ impl Types {
.inner()
}
fn align_up(value: Size, align: Size) -> Size {
(value + align - 1) & !(align - 1)
}
fn size_of(&self, ty: ty::Id) -> Size {
match ty.expand() {
ty::Kind::Ptr(_) => 8,
@ -834,14 +867,9 @@ impl Types {
}
}
ty::Kind::Struct(stru) => {
let mut offset = 0u32;
let record = &self.structs[stru as usize];
for &Field { ty, .. } in record.fields.iter() {
let align = record.explicit_alignment.unwrap_or(self.align_of(ty));
offset = Self::align_up(offset, align);
offset += self.size_of(ty);
}
offset
let mut oiter = OffsetIter::new(stru);
while oiter.next(self).is_some() {}
oiter.offset
}
ty => unimplemented!("size_of: {:?}", ty),
}
@ -856,7 +884,7 @@ impl Types {
.iter()
.map(|&Field { ty, .. }| self.align_of(ty))
.max()
.unwrap()
.unwrap_or(1)
})
}
ty::Kind::Slice(arr) => {
@ -878,6 +906,43 @@ impl Types {
}
}
struct OffsetIter {
strct: ty::Struct,
offset: Offset,
index: usize,
}
fn align_up(value: Size, align: Size) -> Size {
(value + align - 1) & !(align - 1)
}
impl OffsetIter {
fn new(strct: ty::Struct) -> Self {
Self { strct, offset: 0, index: 0 }
}
fn next<'a>(&mut self, tys: &'a Types) -> Option<(&'a Field, Offset)> {
let stru = &tys.structs[self.strct as usize];
let field = stru.fields.get(self.index)?;
self.index += 1;
let align = stru.explicit_alignment.unwrap_or_else(|| tys.align_of(field.ty));
self.offset = align_up(self.offset, align);
let off = self.offset;
self.offset += tys.size_of(field.ty);
Some((field, off))
}
fn next_ty(&mut self, tys: &Types) -> Option<(ty::Id, Offset)> {
let (field, off) = self.next(tys)?;
Some((field.ty, off))
}
fn into_iter(mut self, tys: &Types) -> impl Iterator<Item = (&Field, Offset)> {
std::iter::from_fn(move || self.next(tys))
}
}
struct TaskQueue<T> {
inner: Mutex<TaskQueueInner<T>>,
}
@ -1277,7 +1342,7 @@ fn test_run_vm(out: &[u8], output: &mut String) {
#[derive(Default)]
pub struct Options {
pub fmt: bool,
pub fmt_current: bool,
pub fmt_stdout: bool,
pub dump_asm: bool,
pub extra_threads: usize,
}
@ -1328,7 +1393,7 @@ pub fn run_compiler(
for parsed in parsed {
format_ast(parsed)?;
}
} else if options.fmt_current {
} else if options.fmt_stdout {
let ast = parsed.into_iter().next().unwrap();
let source = std::fs::read_to_string(&*ast.path)?;
format_to(&ast, &source, out)?;
@ -1352,6 +1417,42 @@ pub fn run_compiler(
#[derive(Default)]
pub struct LoggedMem {
pub mem: hbvm::mem::HostMemory,
op_buf: Vec<hbbytecode::Oper>,
disp_buf: String,
prev_instr: Option<hbbytecode::Instr>,
}
impl LoggedMem {
unsafe fn display_instr<T>(&mut self, instr: hbbytecode::Instr, addr: hbvm::mem::Address) {
let novm: *const hbvm::Vm<Self, 0> = std::ptr::null();
let offset = std::ptr::addr_of!((*novm).memory) as usize;
let regs = unsafe {
&*std::ptr::addr_of!(
(*(((self as *mut _ as *mut u8).sub(offset)) as *const hbvm::Vm<Self, 0>))
.registers
)
};
let mut bytes = core::slice::from_raw_parts(
(addr.get() - 1) as *const u8,
std::mem::size_of::<T>() + 1,
);
use std::fmt::Write;
hbbytecode::parse_args(&mut bytes, instr, &mut self.op_buf).unwrap();
debug_assert!(bytes.is_empty());
self.disp_buf.clear();
write!(self.disp_buf, "{:<10}", format!("{instr:?}")).unwrap();
for (i, op) in self.op_buf.drain(..).enumerate() {
if i != 0 {
write!(self.disp_buf, ", ").unwrap();
}
write!(self.disp_buf, "{op:?}").unwrap();
if let hbbytecode::Oper::R(r) = op {
write!(self.disp_buf, "({})", regs[r as usize].0).unwrap()
}
}
log::trc!("read-typed: {:x}: {}", addr.get(), self.disp_buf);
}
}
impl hbvm::mem::Memory for LoggedMem {
@ -1362,13 +1463,9 @@ impl hbvm::mem::Memory for LoggedMem {
count: usize,
) -> Result<(), hbvm::mem::LoadError> {
log::trc!(
"load: {:x} {:?}",
"load: {:x} {}",
addr.get(),
core::slice::from_raw_parts(addr.get() as *const u8, count)
.iter()
.rev()
.map(|&b| format!("{b:02x}"))
.collect::<String>()
AsHex(core::slice::from_raw_parts(addr.get() as *const u8, count))
);
self.mem.load(addr, target, count)
}
@ -1379,39 +1476,38 @@ impl hbvm::mem::Memory for LoggedMem {
source: *const u8,
count: usize,
) -> Result<(), hbvm::mem::StoreError> {
log::trc!(
"store: {:x} {:?}",
addr.get(),
core::slice::from_raw_parts(source, count)
.iter()
.rev()
.map(|&b| format!("{b:02x}"))
.collect::<String>()
);
log::trc!("store: {:x} {}", addr.get(), AsHex(core::slice::from_raw_parts(source, count)));
self.mem.store(addr, source, count)
}
unsafe fn prog_read<T: Copy>(&mut self, addr: hbvm::mem::Address) -> T {
log::trc!(
"read-typed: {:x} {} {:?}",
addr.get(),
std::any::type_name::<T>(),
if core::mem::size_of::<T>() == 1
&& let Some(nm) =
instrs::NAMES.get(std::ptr::read(addr.get() as *const u8) as usize)
{
nm.to_string()
} else {
core::slice::from_raw_parts(addr.get() as *const u8, core::mem::size_of::<T>())
.iter()
.map(|&b| format!("{:02x}", b))
.collect::<String>()
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: hbvm::mem::Address) -> T {
if log::LOG_LEVEL == log::Level::Trc {
if std::any::TypeId::of::<u8>() == std::any::TypeId::of::<T>() {
if let Some(instr) = self.prev_instr {
self.display_instr::<()>(instr, addr);
}
);
self.prev_instr = hbbytecode::Instr::try_from(*(addr.get() as *const u8)).ok();
} else {
let instr = self.prev_instr.take().unwrap();
self.display_instr::<T>(instr, addr);
}
}
self.mem.prog_read(addr)
}
}
struct AsHex<'a>(&'a [u8]);
impl Display for AsHex<'_> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
for &b in self.0 {
write!(f, "{b:02x}")?;
}
Ok(())
}
}
#[cfg(test)]
mod test {
use std::sync::Arc;
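The 'static bound that now appears on prog_read here (and on the hbvm trait and decode/binary_op_imm further below) exists because std::any::TypeId::of::<T>() is only defined for T: 'static; the tracing path uses the TypeId comparison to tell single-byte opcode reads apart from wider operand reads. A minimal sketch of that dispatch, detached from the VM types:

use std::any::TypeId;

// Sketch of the TypeId check used in LoggedMem::prog_read: generic code can
// branch on the concrete T at runtime, but only when T: 'static.
fn classify_read<T: Copy + 'static>() -> &'static str {
    if TypeId::of::<T>() == TypeId::of::<u8>() {
        "opcode byte"      // a one-byte read fetches the next opcode
    } else {
        "operand block"    // any wider read fetches that instruction's operands
    }
}

fn main() {
    assert_eq!(classify_read::<u8>(), "opcode byte");
    assert_eq!(classify_read::<[u8; 3]>(), "operand block");
}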


@ -14,7 +14,7 @@ fn main() -> std::io::Result<()> {
args.iter().filter(|a| !a.starts_with('-')).nth(1).copied().unwrap_or("main.hb"),
hblang::Options {
fmt: args.contains(&"--fmt"),
fmt_current: args.contains(&"--fmt-stdout"),
fmt_stdout: args.contains(&"--fmt-stdout"),
dump_asm: args.contains(&"--dump-asm"),
extra_threads: args
.iter()

File diff suppressed because it is too large.

hblang/src/vc.rs (new file, 286 lines)

@ -0,0 +1,286 @@
use std::{
fmt::Debug,
mem::MaybeUninit,
ops::{Deref, DerefMut, Not},
ptr::Unique,
};
type Nid = u16;
const VC_SIZE: usize = 16;
const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;
pub union Vc {
inline: InlineVc,
alloced: AllocedVc,
}
impl Default for Vc {
fn default() -> Self {
Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
}
}
impl Debug for Vc {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
self.as_slice().fmt(f)
}
}
impl Vc {
fn is_inline(&self) -> bool {
unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
}
fn layout(&self) -> Option<std::alloc::Layout> {
unsafe {
self.is_inline()
.not()
.then(|| std::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked())
}
}
fn len(&self) -> usize {
unsafe {
if self.is_inline() {
self.inline.cap as _
} else {
self.alloced.len as _
}
}
}
fn len_mut(&mut self) -> &mut Nid {
unsafe {
if self.is_inline() {
&mut self.inline.cap
} else {
&mut self.alloced.len
}
}
}
fn as_ptr(&self) -> *const Nid {
unsafe {
match self.is_inline() {
true => self.inline.elems.as_ptr().cast(),
false => self.alloced.base.as_ptr(),
}
}
}
fn as_mut_ptr(&mut self) -> *mut Nid {
unsafe {
match self.is_inline() {
true => self.inline.elems.as_mut_ptr().cast(),
false => self.alloced.base.as_ptr(),
}
}
}
pub fn as_slice(&self) -> &[Nid] {
unsafe { std::slice::from_raw_parts(self.as_ptr(), self.len()) }
}
fn as_slice_mut(&mut self) -> &mut [Nid] {
unsafe { std::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
}
pub fn push(&mut self, value: Nid) {
if let Some(layout) = self.layout()
&& unsafe { self.alloced.len == self.alloced.cap }
{
unsafe {
self.alloced.cap *= 2;
self.alloced.base = Unique::new_unchecked(
std::alloc::realloc(
self.alloced.base.as_ptr().cast(),
layout,
self.alloced.cap as usize * std::mem::size_of::<Nid>(),
)
.cast(),
);
}
} else if self.len() == INLINE_ELEMS {
unsafe {
let mut allcd =
Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
std::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
*self = allcd;
}
}
unsafe {
*self.len_mut() += 1;
self.as_mut_ptr().add(self.len() - 1).write(value);
}
}
unsafe fn alloc(cap: usize, len: usize) -> Self {
debug_assert!(cap > INLINE_ELEMS);
let layout = unsafe { std::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
let alloc = unsafe { std::alloc::alloc(layout) };
unsafe {
Vc {
alloced: AllocedVc {
base: Unique::new_unchecked(alloc.cast()),
len: len as _,
cap: cap as _,
},
}
}
}
pub fn swap_remove(&mut self, index: usize) {
let len = self.len() - 1;
self.as_slice_mut().swap(index, len);
*self.len_mut() -= 1;
}
pub fn remove(&mut self, index: usize) {
self.as_slice_mut().copy_within(index + 1.., index);
*self.len_mut() -= 1;
}
}
impl Drop for Vc {
fn drop(&mut self) {
if let Some(layout) = self.layout() {
unsafe {
std::alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
}
}
}
}
impl Clone for Vc {
fn clone(&self) -> Self {
self.as_slice().into()
}
}
impl IntoIterator for Vc {
type IntoIter = VcIntoIter;
type Item = Nid;
fn into_iter(self) -> Self::IntoIter {
VcIntoIter { start: 0, end: self.len(), vc: self }
}
}
pub struct VcIntoIter {
start: usize,
end: usize,
vc: Vc,
}
impl Iterator for VcIntoIter {
type Item = Nid;
fn next(&mut self) -> Option<Self::Item> {
if self.start == self.end {
return None;
}
let ret = unsafe { std::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
self.start += 1;
Some(ret)
}
fn size_hint(&self) -> (usize, Option<usize>) {
let len = self.end - self.start;
(len, Some(len))
}
}
impl DoubleEndedIterator for VcIntoIter {
fn next_back(&mut self) -> Option<Self::Item> {
if self.start == self.end {
return None;
}
self.end -= 1;
Some(unsafe { std::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
}
}
impl ExactSizeIterator for VcIntoIter {}
impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
fn from(value: [Nid; SIZE]) -> Self {
value.as_slice().into()
}
}
impl<'a> From<&'a [Nid]> for Vc {
fn from(value: &'a [Nid]) -> Self {
if value.len() <= INLINE_ELEMS {
let mut dflt = Self::default();
unsafe {
std::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
};
dflt.inline.cap = value.len() as _;
dflt
} else {
let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
unsafe {
std::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
};
allcd
}
}
}
impl Deref for Vc {
type Target = [Nid];
fn deref(&self) -> &Self::Target {
self.as_slice()
}
}
impl DerefMut for Vc {
fn deref_mut(&mut self) -> &mut Self::Target {
self.as_slice_mut()
}
}
#[derive(Clone, Copy)]
#[repr(C)]
struct InlineVc {
cap: Nid,
elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
}
#[derive(Clone, Copy)]
#[repr(C)]
struct AllocedVc {
cap: Nid,
len: Nid,
base: Unique<Nid>,
}
#[derive(Default)]
pub struct BitSet {
data: Vec<usize>,
}
impl BitSet {
const ELEM_SIZE: usize = std::mem::size_of::<usize>() * 8;
pub fn clear(&mut self, bit_size: usize) {
let new_len = (bit_size + Self::ELEM_SIZE - 1) / Self::ELEM_SIZE;
self.data.clear();
self.data.resize(new_len, 0);
}
#[track_caller]
pub fn set(&mut self, idx: Nid) -> bool {
let idx = idx as usize;
let data_idx = idx / Self::ELEM_SIZE;
let sub_idx = idx % Self::ELEM_SIZE;
let prev = self.data[data_idx] & (1 << sub_idx);
self.data[data_idx] |= 1 << sub_idx;
prev == 0
}
}
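The new hblang/src/vc.rs carries two small helpers: Vc, a 16-byte small-vector of u16 Nid values that keeps up to 7 elements inline and spills to the heap past that (the cap <= INLINE_ELEMS check doubles as the inline/heap discriminant), and BitSet, a word-packed bit set. A usage sketch, written as it might appear in a unit test inside this module (all names come from the file above):

#[test]
fn vc_and_bitset_sketch() {
    let mut vc = Vc::default();            // starts inline (cap <= 7)
    for n in 0..20u16 {
        vc.push(n);                        // the 8th push reallocates to the heap
    }
    assert_eq!(vc.as_slice().len(), 20);
    vc.swap_remove(0);                     // O(1): the last element moves into slot 0
    assert_eq!(vc.as_slice()[0], 19);

    let mut set = BitSet::default();
    set.clear(64);                         // make room for 64 bits, all zeroed
    assert!(set.set(13));                  // true: the bit was newly set
    assert!(!set.set(13));                 // false: it was already present
}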


@ -6,8 +6,8 @@ drop:
ADDI64 r254, r254, 16d
JALA r0, r31, 0a
main:
ADDI64 r254, r254, -48d
ST r31, r254, 8a, 40h
ADDI64 r254, r254, -40d
ST r31, r254, 8a, 32h
LI64 r32, 1d
ST r32, r254, 0a, 8h
ADDI64 r32, r254, 0d
@ -15,12 +15,11 @@ main:
JAL r31, r0, :modify
LD r2, r254, 0a, 8h
JAL r31, r0, :drop
LI64 r33, 0d
CP r34, r32
LD r35, r34, 0a, 8h
ADDI64 r1, r35, -2d
LD r31, r254, 8a, 40h
ADDI64 r254, r254, 48d
CP r33, r32
LD r34, r33, 0a, 8h
ADDI64 r1, r34, -2d
LD r31, r254, 8a, 32h
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
modify:
ADDI64 r254, r254, -32d
@ -32,6 +31,6 @@ modify:
LD r31, r254, 0a, 32h
ADDI64 r254, r254, 32d
JALA r0, r31, 0a
code size: 318
code size: 308
ret: 0
status: Ok(())


@ -1,23 +1,28 @@
main:
ADDI64 r254, r254, -72d
ST r31, r254, 48a, 24h
LI64 r32, 4d
ST r32, r254, 0a, 8h
LI64 r32, 1d
ST r32, r254, 8a, 8h
LI64 r32, 3d
ST r32, r254, 16a, 8h
LI64 r33, 3d
JEQ r32, r33, :0
LI64 r1, 9001d
JMP :1
0: LI64 r33, 4d
ST r33, r254, 0a, 8h
LI64 r33, 1d
ST r33, r254, 8a, 8h
LI64 r33, 3d
ST r33, r254, 16a, 8h
ADDI64 r2, r254, 0d
ADDI64 r1, r254, 24d
JAL r31, r0, :odher_pass
LD r32, r254, 40a, 8h
LI64 r33, 3d
JNE r32, r33, :0
ADDI64 r33, r254, 24d
CP r2, r33
LD r33, r254, 40a, 8h
LI64 r32, 3d
JNE r33, r32, :2
ADDI64 r32, r254, 24d
CP r2, r32
JAL r31, r0, :pass
JMP :1
0: LI64 r1, 0d
2: LI64 r1, 0d
1: LD r31, r254, 48a, 24h
ADDI64 r254, r254, 72d
JALA r0, r31, 0a
@ -43,6 +48,6 @@ pass:
LD r31, r254, 0a, 40h
ADDI64 r254, r254, 40d
JALA r0, r31, 0a
code size: 400
code size: 440
ret: 3
status: Ok(())


@ -35,7 +35,7 @@ pub trait Memory {
///
/// # Safety
/// - Data read have to be valid
unsafe fn prog_read<T: Copy>(&mut self, addr: Address) -> T;
unsafe fn prog_read<T: Copy + 'static>(&mut self, addr: Address) -> T;
}
/// Unhandled load access trap


@ -382,7 +382,7 @@ where
/// Decode instruction operands
#[inline(always)]
unsafe fn decode<T: Copy>(&mut self) -> T {
unsafe fn decode<T: Copy + 'static>(&mut self) -> T {
unsafe { self.memory.prog_read::<T>(self.pc + 1_u64) }
}
@ -446,7 +446,7 @@ where
/// Perform binary operation over register and immediate
#[inline(always)]
unsafe fn binary_op_imm<T: ValueVariant>(&mut self, op: impl Fn(T, T) -> T) {
unsafe fn binary_op_imm<T: ValueVariant + 'static>(&mut self, op: impl Fn(T, T) -> T) {
#[derive(Clone, Copy)]
#[repr(packed)]
struct OpsRRImm<I>(OpsRR, I);