forked from AbleOS/holey-bytes

commit acacd10ee9: microoptimizing bitset
parent: f6f661cee3
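The commit moves the old Vec<usize>-backed BitSet and the Vc small vector out of lang/src/vc.rs into a new lang/src/utils.rs, replaces the bitset with a small-size-optimized union (an inline machine word tagged by its top bit, or a pointer to a heap block), and shrinks Node by storing clobbers as that BitSet and aclass as the new 16-bit AClassId. Below is a minimal standalone sketch of the tagging idea only, assuming a 64-bit target where heap pointers never have their most significant bit set; the names are illustrative and the real implementation is the BitSet in lang/src/utils.rs further down.

// Illustrative sketch, not the commit's code: an inline-or-spilled bit set
// tagged by the most significant bit of the word.
const FLAG: usize = 1 << (usize::BITS as usize - 1);

union SmallSet {
    inline: usize,       // FLAG | up to 63 bits of payload
    spilled: *mut usize, // top bit clear: points at a heap-allocated word array
}

impl SmallSet {
    fn new_inline() -> Self {
        Self { inline: FLAG }
    }

    fn is_inline(&self) -> bool {
        unsafe { self.inline & FLAG != 0 }
    }

    // Sets `bit` if it fits inline; a real implementation would spill to the
    // heap here instead of returning false.
    fn set_inline(&mut self, bit: u32) -> bool {
        if self.is_inline() && (bit as usize) < usize::BITS as usize - 1 {
            unsafe { self.inline |= 1 << bit };
            true
        } else {
            false
        }
    }

    fn get(&self, bit: u32) -> bool {
        (bit as usize) < usize::BITS as usize - 1
            && self.is_inline()
            && unsafe { self.inline & (1 << bit) != 0 }
    }
}

fn main() {
    let mut s = SmallSet::new_inline();
    assert!(s.set_inline(5));
    assert!(s.get(5));
    assert!(!s.get(6));
    assert_eq!(core::mem::size_of::<SmallSet>(), core::mem::size_of::<usize>());
}

With this layout a set stays one word and never allocates until it has to hold an index the inline word cannot reach, which keeps small sets allocation-free.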
@@ -23,7 +23,8 @@
     slice_from_ptr_range,
     is_sorted,
     iter_next_chunk,
-    pointer_is_aligned_to
+    pointer_is_aligned_to,
+    maybe_uninit_fill
 )]
 #![warn(clippy::dbg_macro)]
 #![expect(stable_features, internal_features)]
@@ -69,7 +70,7 @@ pub mod lexer;
 pub mod parser;
 pub mod son;
 
-mod vc;
+mod utils;
 
 mod debug {
     pub fn panicking() -> bool {
@@ -11,7 +11,7 @@ use {
         },
         task,
         ty::{self, Arg, ArrayLen, Loc, Tuple},
-        vc::{BitSet, Vc},
+        utils::{BitSet, Vc},
         FTask, Func, Global, Ident, Offset, OffsetIter, Reloc, Sig, StringRef, SymKey, TypeParser,
         TypedReloc, Types,
     },
@@ -39,6 +39,7 @@ const GLOBAL_ACLASS: usize = 1;
 pub mod hbvm;
 
 type Nid = u16;
+type AClassId = u16;
 
 type Lookup = crate::ctx_map::CtxMap<Nid>;
 
@@ -360,7 +361,7 @@ impl Nodes {
         let mut cursor = min;
         while cursor != self[load].inputs[0] {
             self[cursor].antidep = load;
-            if self[cursor].clobbers.contains(&aclass) {
+            if self[cursor].clobbers.get(aclass as _) {
                 min = self[cursor].inputs[0];
                 break;
             }
@@ -754,7 +755,7 @@ impl Nodes {
     }
 
     pub fn aclass_index(&self, region: Nid) -> (usize, Nid) {
-        (self[region].aclass, self[region].mem)
+        (self[region].aclass as _, self[region].mem)
     }
 
     fn peephole(&mut self, target: Nid) -> Option<Nid> {
@@ -1662,14 +1663,14 @@ pub struct Node {
     inputs: Vc,
     outputs: Vc,
     peep_triggers: Vc,
-    clobbers: Vec<usize>,
+    clobbers: BitSet,
     ty: ty::Id,
     offset: Offset,
     ralloc_backref: RallocBRef,
     depth: IDomDepth,
     lock_rc: LockRc,
     loop_depth: LoopDepth,
-    aclass: usize,
+    aclass: AClassId,
     mem: Nid,
     antidep: Nid,
 }
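For a sense of scale, here is a rough comparison of the per-node cost of the two changed fields, assuming a 64-bit target; the structs below only mirror those fields and are not the project's Node definition.

// Rough size comparison (64-bit target assumed); illustrative only.
use core::mem::size_of;
use core::ptr::NonNull;

#[allow(dead_code)]
struct OldFields {
    clobbers: Vec<usize>, // ptr + cap + len = 24 bytes, plus a heap block when non-empty
    aclass: usize,        // 8 bytes
}

#[allow(dead_code)]
union InlineBitSet {
    inline: usize,
    alloced: NonNull<usize>,
}

#[allow(dead_code)]
struct NewFields {
    clobbers: InlineBitSet, // 8 bytes, no allocation while the set fits inline
    aclass: u16,            // AClassId
}

fn main() {
    assert_eq!(size_of::<OldFields>(), 32);
    assert_eq!(size_of::<NewFields>(), 16); // 8 + 2, padded to usize alignment
    println!(
        "per-node cost of these fields: {} -> {} bytes",
        size_of::<OldFields>(),
        size_of::<NewFields>()
    );
}

An empty Vec<usize> already costs three words of header and spills to the heap on the first push, while the union stays one word and only allocates once the set outgrows its inline bits.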
@@ -2194,7 +2195,7 @@ impl<'a> Codegen<'a> {
 
     fn new_stack(&mut self, ty: ty::Id) -> Nid {
         let stck = self.ci.nodes.new_node_nop(ty, Kind::Stck, [VOID, MEM]);
-        self.ci.nodes[stck].aclass = self.ci.scope.aclasses.len();
+        self.ci.nodes[stck].aclass = self.ci.scope.aclasses.len() as _;
         self.ci.nodes[stck].mem = stck;
         self.ci.scope.aclasses.push(AClass::new(&mut self.ci.nodes));
         stck
@@ -2369,7 +2370,7 @@ impl<'a> Codegen<'a> {
             ty::Kind::Global(global) => {
                 let gl = &self.tys.ins.globals[global as usize];
                 let value = self.ci.nodes.new_node(gl.ty, Kind::Global { global }, [VOID]);
-                self.ci.nodes[value].aclass = GLOBAL_ACLASS;
+                self.ci.nodes[value].aclass = GLOBAL_ACLASS as _;
                 Some(Value::ptr(value).ty(gl.ty))
             }
             _ => Some(Value::new(Nid::MAX).ty(decl)),
@@ -2402,7 +2403,7 @@ impl<'a> Codegen<'a> {
                     }
                 };
                 let global = self.ci.nodes.new_node(ty, Kind::Global { global }, [VOID]);
-                self.ci.nodes[global].aclass = GLOBAL_ACLASS;
+                self.ci.nodes[global].aclass = GLOBAL_ACLASS as _;
                 Some(Value::new(global).ty(ty))
             }
             Expr::Return { pos, val } => {
@@ -2472,7 +2473,7 @@ impl<'a> Codegen<'a> {
                         let gl = &self.tys.ins.globals[global as usize];
                         let value =
                             self.ci.nodes.new_node(gl.ty, Kind::Global { global }, [VOID]);
-                        self.ci.nodes[value].aclass = GLOBAL_ACLASS;
+                        self.ci.nodes[value].aclass = GLOBAL_ACLASS as _;
                         Some(Value::ptr(value).ty(gl.ty))
                     }
                     v => Some(Value::new(Nid::MAX).ty(v.compress())),
@@ -2621,7 +2622,7 @@ impl<'a> Codegen<'a> {
                 let inps = [VOID, lhs.id, rhs.id];
                 let bop =
                     self.ci.nodes.new_node_lit(ty.bin_ret(op), Kind::BinOp { op }, inps);
-                self.ci.nodes[bop.id].aclass = aclass;
+                self.ci.nodes[bop.id].aclass = aclass as _;
                 self.ci.nodes[bop.id].mem = mem;
                 Some(bop)
             }
@@ -2677,7 +2678,7 @@ impl<'a> Codegen<'a> {
                 let inps = [VOID, bs.id, offset];
                 let ptr =
                     self.ci.nodes.new_node(ty::Id::INT, Kind::BinOp { op: TokenKind::Add }, inps);
-                self.ci.nodes[ptr].aclass = aclass;
+                self.ci.nodes[ptr].aclass = aclass as _;
                 self.ci.nodes[ptr].mem = mem;
 
                 Some(Value::ptr(ptr).ty(elem))
@@ -2855,7 +2856,7 @@ impl<'a> Codegen<'a> {
 
         let mut inps = Vc::from([NEVER]);
         let arg_base = self.tys.tmp.args.len();
-        let mut clobbered_aliases = vec![GLOBAL_ACLASS];
+        let mut clobbered_aliases = BitSet::default();
         for arg in args {
            let value = self.expr(arg)?;
            self.add_clobbers(value, &mut clobbered_aliases);
@@ -2871,7 +2872,7 @@ impl<'a> Codegen<'a> {
             self.ci.nodes.unlock(n);
         }
 
-        self.append_clobbers(&mut inps, &clobbered_aliases);
+        self.append_clobbers(&mut inps, &mut clobbered_aliases);
 
         let alt_value = match ty.loc(self.tys) {
             Loc::Reg => None,
@@ -2928,7 +2929,7 @@ impl<'a> Codegen<'a> {
         let mut tys = sig.args.args();
         let mut cargs = cargs.iter();
         let mut args = args.iter();
-        let mut clobbered_aliases = vec![GLOBAL_ACLASS];
+        let mut clobbered_aliases = BitSet::default();
         while let Some(ty) = tys.next(self.tys) {
             let carg = cargs.next().unwrap();
             let Some(arg) = args.next() else { break };
@@ -2947,7 +2948,7 @@ impl<'a> Codegen<'a> {
             self.ci.nodes.unlock(n);
         }
 
-        self.append_clobbers(&mut inps, &clobbered_aliases);
+        self.append_clobbers(&mut inps, &mut clobbered_aliases);
 
         let alt_value = match sig.ret.loc(self.tys) {
             Loc::Reg => None,
@@ -3534,27 +3535,28 @@ impl<'a> Codegen<'a> {
         }
     }
 
-    fn add_clobbers(&mut self, value: Value, clobbered_aliases: &mut Vec<usize>) {
+    fn add_clobbers(&mut self, value: Value, clobbered_aliases: &mut BitSet) {
         if let Some(base) = self.tys.base_of(value.ty) {
-            clobbered_aliases.push(self.ci.nodes.aclass_index(value.id).0);
+            clobbered_aliases.set(self.ci.nodes.aclass_index(value.id).0 as _);
             if base.has_pointers(self.tys) {
-                clobbered_aliases.push(DEFAULT_ACLASS);
+                clobbered_aliases.set(DEFAULT_ACLASS as _);
             }
         } else if value.ty.has_pointers(self.tys) {
-            clobbered_aliases.push(DEFAULT_ACLASS);
+            clobbered_aliases.set(DEFAULT_ACLASS as _);
         }
     }
 
-    fn append_clobbers(&mut self, inps: &mut Vc, clobbered_aliases: &[usize]) {
-        for &clobbered in clobbered_aliases.iter() {
+    fn append_clobbers(&mut self, inps: &mut Vc, clobbered_aliases: &mut BitSet) {
+        clobbered_aliases.set(GLOBAL_ACLASS as _);
+        for clobbered in clobbered_aliases.iter() {
             let aclass = &mut self.ci.scope.aclasses[clobbered];
             self.ci.nodes.load_loop_aclass(clobbered, aclass, &mut self.ci.loops);
             inps.push(aclass.last_store.get());
         }
     }
 
-    fn add_clobber_stores(&mut self, clobbered_aliases: Vec<usize>) {
-        for &clobbered in clobbered_aliases.iter() {
+    fn add_clobber_stores(&mut self, clobbered_aliases: BitSet) {
+        for clobbered in clobbered_aliases.iter() {
             self.ci.scope.aclasses[clobbered].clobber.set(self.ci.ctrl.get(), &mut self.ci.nodes);
         }
         self.ci.nodes[self.ci.ctrl.get()].clobbers = clobbered_aliases;
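The new append_clobbers folds GLOBAL_ACLASS into the set itself (the Vec version seeded it with vec![GLOBAL_ACLASS] at the call sites above) and then walks the set bits in ascending order. The walk uses the trailing_zeros/clear-lowest-bit idiom of BitSetIter, shown in lang/src/utils.rs below; here is a standalone sketch of just that loop, not taken from the commit.

// Iterate the indices of set bits in a word, lowest first.
fn set_bits(mut word: usize) -> impl Iterator<Item = usize> {
    core::iter::from_fn(move || {
        if word == 0 {
            return None;
        }
        let idx = word.trailing_zeros() as usize;
        word &= word - 1; // clear the lowest set bit
        Some(idx)
    })
}

fn main() {
    let clobbered: usize = (1 << 1) | (1 << 4) | (1 << 9);
    assert_eq!(set_bits(clobbered).collect::<Vec<_>>(), vec![1, 4, 9]);
}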
@@ -3731,7 +3733,7 @@ impl<'a> Codegen<'a> {
         let (aclass, mem) = self.ci.nodes.aclass_index(val);
         let inps = [VOID, val, off];
         let seted = self.ci.nodes.new_node(ty::Id::INT, Kind::BinOp { op: TokenKind::Add }, inps);
-        self.ci.nodes[seted].aclass = aclass;
+        self.ci.nodes[seted].aclass = aclass as _;
        self.ci.nodes[seted].mem = mem;
        seted
    }
@@ -3840,7 +3842,7 @@ impl<'a> Codegen<'a> {
                 &mut self.ci.nodes,
             ));
             if ty.loc(self.tys) == Loc::Stack {
-                self.ci.nodes[value].aclass = self.ci.scope.aclasses.len();
+                self.ci.nodes[value].aclass = self.ci.scope.aclasses.len() as _;
                 self.ci.scope.aclasses.push(AClass::new(&mut self.ci.nodes));
             }
         }
@@ -3919,7 +3921,11 @@ impl<'a> Codegen<'a> {
                 self.ci.nodes.new_node(upcasted, Kind::BinOp { op: TokenKind::Mul }, [
                     VOID, oper.id, cnst,
                 ]);
-                return (upcasted, self.ci.nodes[other.id].aclass, self.ci.nodes[other.id].mem);
+                return (
+                    upcasted,
+                    self.ci.nodes[other.id].aclass as _,
+                    self.ci.nodes[other.id].mem,
+                );
             }
         }
 
@@ -6,7 +6,7 @@ use {
         son::{write_reloc, Kind, MEM},
         task,
         ty::{self, Arg, Loc},
-        vc::{BitSet, Vc},
+        utils::{BitSet, Vc},
         HashMap, Offset, PLoc, Reloc, Sig, Size, TypedReloc, Types,
     },
     alloc::{borrow::ToOwned, string::String, vec::Vec},
lang/src/utils.rs (new file, 495 lines)
@@ -0,0 +1,495 @@
use {
    alloc::alloc,
    core::{
        alloc::Layout,
        fmt::Debug,
        hint::unreachable_unchecked,
        mem::MaybeUninit,
        ops::{Deref, DerefMut, Not},
        ptr::Unique,
    },
};

type Nid = u16;

pub union BitSet {
    inline: usize,
    alloced: Unique<AllocedBitSet>,
}

impl Debug for BitSet {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}

impl Clone for BitSet {
    fn clone(&self) -> Self {
        if self.is_inline() {
            Self { inline: unsafe { self.inline } }
        } else {
            let (data, _) = self.data_and_len();
            let (layout, _) = Self::layout(data.len());
            unsafe {
                let ptr = alloc::alloc(layout);
                ptr.copy_from_nonoverlapping(self.alloced.as_ptr() as _, layout.size());
                Self { alloced: Unique::new_unchecked(ptr as _) }
            }
        }
    }
}

impl Drop for BitSet {
    fn drop(&mut self) {
        if !self.is_inline() {
            unsafe {
                let cap = self.alloced.as_ref().cap;
                alloc::dealloc(self.alloced.as_ptr() as _, Self::layout(cap).0);
            }
        }
    }
}

impl Default for BitSet {
    fn default() -> Self {
        Self { inline: Self::FLAG }
    }
}

impl BitSet {
    const FLAG: usize = 1 << (Self::UNIT - 1);
    const INLINE_ELEMS: usize = Self::UNIT - 1;
    const UNIT: usize = core::mem::size_of::<usize>() * 8;

    fn is_inline(&self) -> bool {
        unsafe { self.inline & Self::FLAG != 0 }
    }

    fn data_and_len(&self) -> (&[usize], usize) {
        unsafe {
            if self.is_inline() {
                (core::slice::from_ref(&self.inline), Self::INLINE_ELEMS)
            } else {
                let small_vec = self.alloced.as_ref();
                (
                    core::slice::from_raw_parts(
                        &small_vec.data as *const _ as *const usize,
                        small_vec.cap,
                    ),
                    small_vec.cap * core::mem::size_of::<usize>() * 8,
                )
            }
        }
    }

    fn data_mut_and_len(&mut self) -> (&mut [usize], usize) {
        unsafe {
            if self.is_inline() {
                (core::slice::from_mut(&mut self.inline), INLINE_ELEMS)
            } else {
                let small_vec = self.alloced.as_mut();
                (
                    core::slice::from_raw_parts_mut(
                        &mut small_vec.data as *mut _ as *mut usize,
                        small_vec.cap,
                    ),
                    small_vec.cap * Self::UNIT,
                )
            }
        }
    }

    fn indexes(index: usize) -> (usize, usize) {
        (index / Self::UNIT, index % Self::UNIT)
    }

    pub fn get(&self, index: Nid) -> bool {
        let index = index as usize;
        let (data, len) = self.data_and_len();
        if index >= len {
            return false;
        }
        let (elem, bit) = Self::indexes(index);
        (unsafe { *data.get_unchecked(elem) }) & (1 << bit) != 0
    }

    pub fn set(&mut self, index: Nid) -> bool {
        let index = index as usize;
        let (mut data, len) = self.data_mut_and_len();
        if core::intrinsics::unlikely(index >= len) {
            self.grow(index.next_power_of_two().max(4 * Self::UNIT));
            (data, _) = self.data_mut_and_len();
        }

        let (elem, bit) = Self::indexes(index);
        let elem = unsafe { data.get_unchecked_mut(elem) };
        let prev = *elem;
        *elem |= 1 << bit;
        *elem != prev
    }

    fn grow(&mut self, size: usize) {
        debug_assert!(size.is_power_of_two());
        let slot_count = size / Self::UNIT;
        let (layout, off) = Self::layout(slot_count);
        let (ptr, prev_len) = unsafe {
            if self.is_inline() {
                let ptr = alloc::alloc(layout);
                *ptr.add(off).cast::<usize>() = self.inline & !Self::FLAG;
                (ptr, 1)
            } else {
                let prev_len = self.alloced.as_ref().cap;
                let (prev_layout, _) = Self::layout(prev_len);
                (alloc::realloc(self.alloced.as_ptr() as _, prev_layout, layout.size()), prev_len)
            }
        };
        unsafe {
            MaybeUninit::fill(
                core::slice::from_raw_parts_mut(
                    ptr.add(off).cast::<MaybeUninit<usize>>().add(prev_len),
                    slot_count - prev_len,
                ),
                0,
            );
            *ptr.cast::<usize>() = slot_count;
            core::ptr::write(self, Self { alloced: Unique::new_unchecked(ptr as _) });
        }
    }

    fn layout(slot_count: usize) -> (core::alloc::Layout, usize) {
        unsafe {
            core::alloc::Layout::new::<AllocedBitSet>()
                .extend(Layout::array::<usize>(slot_count).unwrap_unchecked())
                .unwrap_unchecked()
        }
    }

    pub fn iter(&self) -> BitSetIter {
        if self.is_inline() {
            BitSetIter { index: 0, current: unsafe { self.inline & !Self::FLAG }, remining: &[] }
        } else {
            let &[current, ref remining @ ..] = self.data_and_len().0 else {
                unsafe { unreachable_unchecked() }
            };
            BitSetIter { index: 0, current, remining }
        }
    }

    pub fn clear(&mut self, len: usize) {
        if len > self.data_and_len().1 {
            self.grow(len.next_power_of_two().max(4 * Self::UNIT));
        }
        if self.is_inline() {
            unsafe { self.inline &= Self::FLAG };
        } else {
            self.data_mut_and_len().0.fill(0);
        }
    }
}

pub struct BitSetIter<'a> {
    index: usize,
    current: usize,
    remining: &'a [usize],
}

impl Iterator for BitSetIter<'_> {
    type Item = usize;

    fn next(&mut self) -> Option<Self::Item> {
        while self.current == 0 {
            self.current = *self.remining.take_first()?;
            self.index += 1;
        }

        let sub_idx = self.current.trailing_zeros() as usize;
        self.current &= self.current - 1;
        Some(self.index * BitSet::UNIT + sub_idx)
    }
}

struct AllocedBitSet {
    cap: usize,
    data: [usize; 0],
}

#[cfg(test)]
#[test]
fn test_small_bit_set() {
    use std::vec::Vec;

    let mut sv = BitSet::default();

    sv.set(10);
    debug_assert!(sv.get(10));
    sv.set(100);
    debug_assert!(sv.get(100));
    sv.set(10000);
    debug_assert!(sv.get(10000));
    debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[10, 100, 10000]);
    sv.clear(10000);
    debug_assert_eq!(sv.iter().collect::<Vec<_>>(), &[]);
}

pub union Vc {
    inline: InlineVc,
    alloced: AllocedVc,
}

impl Default for Vc {
    fn default() -> Self {
        Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
    }
}

impl Debug for Vc {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.as_slice().fmt(f)
    }
}

impl FromIterator<Nid> for Vc {
    fn from_iter<T: IntoIterator<Item = Nid>>(iter: T) -> Self {
        let mut slf = Self::default();
        for i in iter {
            slf.push(i);
        }
        slf
    }
}

const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;
const VC_SIZE: usize = 16;

impl Vc {
    fn is_inline(&self) -> bool {
        unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
    }

    fn layout(&self) -> Option<core::alloc::Layout> {
        unsafe {
            self.is_inline().not().then(|| {
                core::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked()
            })
        }
    }

    pub fn len(&self) -> usize {
        unsafe {
            if self.is_inline() {
                self.inline.cap as _
            } else {
                self.alloced.len as _
            }
        }
    }

    fn len_mut(&mut self) -> &mut Nid {
        unsafe {
            if self.is_inline() {
                &mut self.inline.cap
            } else {
                &mut self.alloced.len
            }
        }
    }

    fn as_ptr(&self) -> *const Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    fn as_mut_ptr(&mut self) -> *mut Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_mut_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    pub fn as_slice(&self) -> &[Nid] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    fn as_slice_mut(&mut self) -> &mut [Nid] {
        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
    }

    pub fn push(&mut self, value: Nid) {
        if let Some(layout) = self.layout()
            && unsafe { self.alloced.len == self.alloced.cap }
        {
            unsafe {
                self.alloced.cap *= 2;
                self.alloced.base = Unique::new_unchecked(
                    alloc::realloc(
                        self.alloced.base.as_ptr().cast(),
                        layout,
                        self.alloced.cap as usize * core::mem::size_of::<Nid>(),
                    )
                    .cast(),
                );
            }
        } else if self.len() == INLINE_ELEMS {
            unsafe {
                let mut allcd =
                    Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
                core::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
                *self = allcd;
            }
        }

        unsafe {
            *self.len_mut() += 1;
            self.as_mut_ptr().add(self.len() - 1).write(value);
        }
    }

    unsafe fn alloc(cap: usize, len: usize) -> Self {
        debug_assert!(cap > INLINE_ELEMS);
        let layout = unsafe { core::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
        let alloc = unsafe { alloc::alloc(layout) };
        unsafe {
            Vc {
                alloced: AllocedVc {
                    base: Unique::new_unchecked(alloc.cast()),
                    len: len as _,
                    cap: cap as _,
                },
            }
        }
    }

    pub fn swap_remove(&mut self, index: usize) {
        let len = self.len() - 1;
        self.as_slice_mut().swap(index, len);
        *self.len_mut() -= 1;
    }

    pub fn remove(&mut self, index: usize) {
        self.as_slice_mut().copy_within(index + 1.., index);
        *self.len_mut() -= 1;
    }
}

impl Drop for Vc {
    fn drop(&mut self) {
        if let Some(layout) = self.layout() {
            unsafe {
                alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
            }
        }
    }
}

impl Clone for Vc {
    fn clone(&self) -> Self {
        self.as_slice().into()
    }
}

impl IntoIterator for Vc {
    type IntoIter = VcIntoIter;
    type Item = Nid;

    fn into_iter(self) -> Self::IntoIter {
        VcIntoIter { start: 0, end: self.len(), vc: self }
    }
}

pub struct VcIntoIter {
    start: usize,
    end: usize,
    vc: Vc,
}

impl Iterator for VcIntoIter {
    type Item = Nid;

    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ret = unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
        self.start += 1;
        Some(ret)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.end - self.start;
        (len, Some(len))
    }
}

impl DoubleEndedIterator for VcIntoIter {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        self.end -= 1;
        Some(unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
    }
}

impl ExactSizeIterator for VcIntoIter {}

impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
    fn from(value: [Nid; SIZE]) -> Self {
        value.as_slice().into()
    }
}

impl<'a> From<&'a [Nid]> for Vc {
    fn from(value: &'a [Nid]) -> Self {
        if value.len() <= INLINE_ELEMS {
            let mut dflt = Self::default();
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
            };
            dflt.inline.cap = value.len() as _;
            dflt
        } else {
            let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
            };
            allcd
        }
    }
}

impl Deref for Vc {
    type Target = [Nid];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl DerefMut for Vc {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_slice_mut()
    }
}

#[derive(Clone, Copy)]
#[repr(C)]
struct InlineVc {
    cap: Nid,
    elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
}

#[derive(Clone, Copy)]
#[repr(C)]
struct AllocedVc {
    cap: Nid,
    len: Nid,
    base: Unique<Nid>,
}
lang/src/vc.rs (deleted, 306 lines)
@@ -1,306 +0,0 @@
use {
    alloc::vec::Vec,
    core::{
        fmt::Debug,
        mem::MaybeUninit,
        ops::{Deref, DerefMut, Not},
        ptr::Unique,
    },
};

type Nid = u16;

const VC_SIZE: usize = 16;
const INLINE_ELEMS: usize = VC_SIZE / 2 - 1;

pub union Vc {
    inline: InlineVc,
    alloced: AllocedVc,
}

impl Default for Vc {
    fn default() -> Self {
        Vc { inline: InlineVc { elems: MaybeUninit::uninit(), cap: Default::default() } }
    }
}

impl Debug for Vc {
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        self.as_slice().fmt(f)
    }
}

impl FromIterator<Nid> for Vc {
    fn from_iter<T: IntoIterator<Item = Nid>>(iter: T) -> Self {
        let mut slf = Self::default();
        for i in iter {
            slf.push(i);
        }
        slf
    }
}

impl Vc {
    fn is_inline(&self) -> bool {
        unsafe { self.inline.cap <= INLINE_ELEMS as Nid }
    }

    fn layout(&self) -> Option<core::alloc::Layout> {
        unsafe {
            self.is_inline().not().then(|| {
                core::alloc::Layout::array::<Nid>(self.alloced.cap as _).unwrap_unchecked()
            })
        }
    }

    pub fn len(&self) -> usize {
        unsafe {
            if self.is_inline() {
                self.inline.cap as _
            } else {
                self.alloced.len as _
            }
        }
    }

    fn len_mut(&mut self) -> &mut Nid {
        unsafe {
            if self.is_inline() {
                &mut self.inline.cap
            } else {
                &mut self.alloced.len
            }
        }
    }

    fn as_ptr(&self) -> *const Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    fn as_mut_ptr(&mut self) -> *mut Nid {
        unsafe {
            match self.is_inline() {
                true => self.inline.elems.as_mut_ptr().cast(),
                false => self.alloced.base.as_ptr(),
            }
        }
    }

    pub fn as_slice(&self) -> &[Nid] {
        unsafe { core::slice::from_raw_parts(self.as_ptr(), self.len()) }
    }

    fn as_slice_mut(&mut self) -> &mut [Nid] {
        unsafe { core::slice::from_raw_parts_mut(self.as_mut_ptr(), self.len()) }
    }

    pub fn push(&mut self, value: Nid) {
        if let Some(layout) = self.layout()
            && unsafe { self.alloced.len == self.alloced.cap }
        {
            unsafe {
                self.alloced.cap *= 2;
                self.alloced.base = Unique::new_unchecked(
                    alloc::alloc::realloc(
                        self.alloced.base.as_ptr().cast(),
                        layout,
                        self.alloced.cap as usize * core::mem::size_of::<Nid>(),
                    )
                    .cast(),
                );
            }
        } else if self.len() == INLINE_ELEMS {
            unsafe {
                let mut allcd =
                    Self::alloc((self.inline.cap + 1).next_power_of_two() as _, self.len());
                core::ptr::copy_nonoverlapping(self.as_ptr(), allcd.as_mut_ptr(), self.len());
                *self = allcd;
            }
        }

        unsafe {
            *self.len_mut() += 1;
            self.as_mut_ptr().add(self.len() - 1).write(value);
        }
    }

    unsafe fn alloc(cap: usize, len: usize) -> Self {
        debug_assert!(cap > INLINE_ELEMS);
        let layout = unsafe { core::alloc::Layout::array::<Nid>(cap).unwrap_unchecked() };
        let alloc = unsafe { alloc::alloc::alloc(layout) };
        unsafe {
            Vc {
                alloced: AllocedVc {
                    base: Unique::new_unchecked(alloc.cast()),
                    len: len as _,
                    cap: cap as _,
                },
            }
        }
    }

    pub fn swap_remove(&mut self, index: usize) {
        let len = self.len() - 1;
        self.as_slice_mut().swap(index, len);
        *self.len_mut() -= 1;
    }

    pub fn remove(&mut self, index: usize) {
        self.as_slice_mut().copy_within(index + 1.., index);
        *self.len_mut() -= 1;
    }
}

impl Drop for Vc {
    fn drop(&mut self) {
        if let Some(layout) = self.layout() {
            unsafe {
                alloc::alloc::dealloc(self.alloced.base.as_ptr().cast(), layout);
            }
        }
    }
}

impl Clone for Vc {
    fn clone(&self) -> Self {
        self.as_slice().into()
    }
}

impl IntoIterator for Vc {
    type IntoIter = VcIntoIter;
    type Item = Nid;

    fn into_iter(self) -> Self::IntoIter {
        VcIntoIter { start: 0, end: self.len(), vc: self }
    }
}

pub struct VcIntoIter {
    start: usize,
    end: usize,
    vc: Vc,
}

impl Iterator for VcIntoIter {
    type Item = Nid;

    fn next(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        let ret = unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.start)) };
        self.start += 1;
        Some(ret)
    }

    fn size_hint(&self) -> (usize, Option<usize>) {
        let len = self.end - self.start;
        (len, Some(len))
    }
}

impl DoubleEndedIterator for VcIntoIter {
    fn next_back(&mut self) -> Option<Self::Item> {
        if self.start == self.end {
            return None;
        }

        self.end -= 1;
        Some(unsafe { core::ptr::read(self.vc.as_slice().get_unchecked(self.end)) })
    }
}

impl ExactSizeIterator for VcIntoIter {}

impl<const SIZE: usize> From<[Nid; SIZE]> for Vc {
    fn from(value: [Nid; SIZE]) -> Self {
        value.as_slice().into()
    }
}

impl<'a> From<&'a [Nid]> for Vc {
    fn from(value: &'a [Nid]) -> Self {
        if value.len() <= INLINE_ELEMS {
            let mut dflt = Self::default();
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), dflt.as_mut_ptr(), value.len())
            };
            dflt.inline.cap = value.len() as _;
            dflt
        } else {
            let mut allcd = unsafe { Self::alloc(value.len(), value.len()) };
            unsafe {
                core::ptr::copy_nonoverlapping(value.as_ptr(), allcd.as_mut_ptr(), value.len())
            };
            allcd
        }
    }
}

impl Deref for Vc {
    type Target = [Nid];

    fn deref(&self) -> &Self::Target {
        self.as_slice()
    }
}

impl DerefMut for Vc {
    fn deref_mut(&mut self) -> &mut Self::Target {
        self.as_slice_mut()
    }
}

#[derive(Clone, Copy)]
#[repr(C)]
struct InlineVc {
    cap: Nid,
    elems: MaybeUninit<[Nid; INLINE_ELEMS]>,
}

#[derive(Clone, Copy)]
#[repr(C)]
struct AllocedVc {
    cap: Nid,
    len: Nid,
    base: Unique<Nid>,
}

#[derive(Default, Clone)]
pub struct BitSet {
    data: Vec<usize>,
}

impl BitSet {
    const ELEM_SIZE: usize = core::mem::size_of::<usize>() * 8;

    pub fn clear(&mut self, bit_size: usize) {
        let new_len = bit_size.div_ceil(Self::ELEM_SIZE);
        self.data.clear();
        self.data.resize(new_len, 0);
    }

    pub fn set(&mut self, idx: Nid) -> bool {
        let idx = idx as usize;
        let data_idx = idx / Self::ELEM_SIZE;
        let sub_idx = idx % Self::ELEM_SIZE;
        let prev = self.data[data_idx] & (1 << sub_idx);
        self.data[data_idx] |= 1 << sub_idx;
        prev == 0
    }

    pub fn get(&self, idx: Nid) -> bool {
        let idx = idx as usize;
        let data_idx = idx / Self::ELEM_SIZE;
        let sub_idx = idx % Self::ELEM_SIZE;
        let prev = self.data[data_idx] & (1 << sub_idx);
        prev != 0
    }
}