This commit is contained in:
Chris Fallin 2021-11-21 17:15:26 -08:00
parent 58ef5a86f7
commit b81e805cf1
9 changed files with 491 additions and 260 deletions


@ -15,3 +15,4 @@ log = "0.4"
env_logger = "0.9"
fxhash = "0.2"
smallvec = "1.7"
regalloc2 = { git = 'https://github.com/bytecodealliance/regalloc2', rev = 'c7bc6c941cd81bbd30b95969009b7e61539f2b4c' }


@ -1,153 +0,0 @@
//! Dataflow analysis.
use fxhash::{FxHashMap, FxHashSet};
use std::collections::VecDeque;
use std::{fmt::Debug, hash::Hash};
use crate::{BlockId, FunctionBody, InstId};
pub trait Lattice: Clone + Debug + PartialEq + Eq {
fn top() -> Self;
fn bottom() -> Self;
fn meet(a: &Self, b: &Self) -> Self;
}
pub trait AnalysisKey: Clone + Debug + PartialEq + Eq + Hash {}
impl AnalysisKey for u32 {}
#[derive(Clone, Debug)]
pub struct AnalysisValue<K: AnalysisKey, L: Lattice> {
pub values: FxHashMap<K, L>,
}
impl<K: AnalysisKey, L: Lattice> std::default::Default for AnalysisValue<K, L> {
fn default() -> Self {
Self {
values: FxHashMap::default(),
}
}
}
impl<K: AnalysisKey, L: Lattice> AnalysisValue<K, L> {
fn meet_with(&mut self, other: &Self, meet_mode: MapMeetMode) -> bool {
let mut changed = false;
let mut to_remove = vec![];
for (key, value) in &mut self.values {
if let Some(other_value) = other.values.get(key) {
let met = L::meet(value, other_value);
if met != *value {
changed = true;
*value = met;
}
} else {
if meet_mode == MapMeetMode::Intersection {
to_remove.push(key.clone());
changed = true;
}
}
}
for k in to_remove {
self.values.remove(&k);
}
if meet_mode == MapMeetMode::Union {
for (key, value) in &other.values {
if !self.values.contains_key(key) {
self.values.insert(key.clone(), value.clone());
changed = true;
}
}
}
changed
}
}
pub trait AnalysisFunction {
type K: AnalysisKey;
type L: Lattice;
fn instruction(
&self,
_input: &mut AnalysisValue<Self::K, Self::L>,
_func: &FunctionBody,
_block: BlockId,
_inst: InstId,
) -> bool {
false
}
fn terminator(
&self,
_input: &mut AnalysisValue<Self::K, Self::L>,
_func: &FunctionBody,
_block: BlockId,
_index: usize,
_next: BlockId,
) -> bool {
false
}
fn meet_mode(&self) -> MapMeetMode {
MapMeetMode::Union
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum MapMeetMode {
Union,
Intersection,
}
#[derive(Clone, Debug, Default)]
pub struct ForwardDataflow<F: AnalysisFunction> {
block_in: Vec<AnalysisValue<F::K, F::L>>,
workqueue: VecDeque<BlockId>,
workqueue_set: FxHashSet<BlockId>,
}
impl<F: AnalysisFunction> ForwardDataflow<F> {
pub fn new(analysis: &F, func: &FunctionBody) -> Self {
let mut ret = ForwardDataflow {
block_in: vec![AnalysisValue::default(); func.blocks.len()],
workqueue: vec![0].into(),
workqueue_set: vec![0].into_iter().collect(),
};
ret.compute(analysis, func);
ret
}
fn compute(&mut self, analysis: &F, func: &FunctionBody) {
while let Some(block) = self.workqueue.pop_front() {
self.workqueue_set.remove(&block);
self.update_block(analysis, func, block);
}
}
fn update_block(&mut self, analysis: &F, func: &FunctionBody, block: BlockId) {
let mut value = self.block_in[block].clone();
let mut changed = false;
for i in 0..func.blocks[block].insts.len() {
changed |= analysis.instruction(&mut value, func, block, i);
}
for (i, succ) in func.blocks[block]
.terminator
.successors()
.into_iter()
.enumerate()
{
let mut term_changed = changed;
let mut value = value.clone();
term_changed |= analysis.terminator(&mut value, func, block, i, succ);
if term_changed {
if self.block_in[succ].meet_with(&value, analysis.meet_mode()) {
if !self.workqueue_set.contains(&succ) {
self.workqueue.push_back(succ);
self.workqueue_set.insert(succ);
}
}
}
}
}
}


@ -143,11 +143,8 @@ fn parse_body<'a, 'b>(
for (arg_idx, &arg_ty) in module.signatures[my_sig].params.iter().enumerate() {
let local_idx = arg_idx as LocalId;
let value = builder.body.values.len() as ValueId;
builder.body.values.push(ValueDef {
kind: ValueKind::Arg(arg_idx),
ty: arg_ty,
});
let value = Value::arg(arg_idx);
builder.body.types.insert(value, arg_ty);
trace!("defining local {} to value {}", local_idx, value);
builder.locals.insert(local_idx, (arg_ty, value));
}
@ -174,8 +171,8 @@ struct FunctionBodyBuilder<'a, 'b> {
body: &'b mut FunctionBody<'a>,
cur_block: Option<BlockId>,
ctrl_stack: Vec<Frame>,
op_stack: Vec<(Type, ValueId)>,
locals: FxHashMap<LocalId, (Type, ValueId)>,
op_stack: Vec<(Type, Value)>,
locals: FxHashMap<LocalId, (Type, Value)>,
block_param_locals: FxHashMap<BlockId, Vec<LocalId>>,
}
@ -198,7 +195,7 @@ enum Frame {
start_depth: usize,
out: BlockId,
el: BlockId,
param_values: Vec<(Type, ValueId)>,
param_values: Vec<(Type, Value)>,
params: Vec<Type>,
results: Vec<Type>,
},
@ -278,7 +275,7 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
self.body.blocks[block].params.extend_from_slice(tys);
}
fn pop_n(&mut self, n: usize) -> Vec<ValueId> {
fn pop_n(&mut self, n: usize) -> Vec<Value> {
let new_top = self.op_stack.len() - n;
let ret = self.op_stack[new_top..]
.iter()
@ -288,7 +285,7 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
ret
}
fn pop_1(&mut self) -> ValueId {
fn pop_1(&mut self) -> Value {
self.op_stack.pop().unwrap().1
}
@ -327,14 +324,9 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
})
.unwrap();
self.op_stack.pop();
let value = self.body.values.len() as ValueId;
self.body.values.push(ValueDef {
ty,
kind: ValueKind::Inst(block, inst, 0),
});
value
Value::inst(block, inst, 0)
} else {
NO_VALUE
Value::undef()
}
});
self.op_stack.push((ty, value));
@ -786,7 +778,7 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
&self.ctrl_stack[self.ctrl_stack.len() - 1 - relative_depth as usize]
}
fn fill_block_params_with_locals(&mut self, target: BlockId, args: &mut Vec<Operand>) {
fn fill_block_params_with_locals(&mut self, target: BlockId, args: &mut Vec<Value>) {
if !self.block_param_locals.contains_key(&target) {
let mut keys: Vec<LocalId> = self.locals.keys().cloned().collect();
keys.sort();
@ -799,13 +791,13 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
let block_param_locals = self.block_param_locals.get(&target).unwrap();
for local in block_param_locals {
let local_value = self.locals.get(local).unwrap();
args.push(Operand::value(local_value.1));
args.push(local_value.1);
}
}
fn emit_branch(&mut self, target: BlockId, args: &[ValueId]) {
fn emit_branch(&mut self, target: BlockId, args: &[Value]) {
if let Some(block) = self.cur_block {
let mut args: Vec<Operand> = args.iter().map(|&val| Operand::value(val)).collect();
let mut args = args.to_vec();
self.fill_block_params_with_locals(target, &mut args);
let target = BlockTarget {
block: target,
@ -817,25 +809,19 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
fn emit_cond_branch(
&mut self,
cond: ValueId,
cond: Value,
if_true: BlockId,
if_true_args: &[ValueId],
if_true_args: &[Value],
if_false: BlockId,
if_false_args: &[ValueId],
if_false_args: &[Value],
) {
if let Some(block) = self.cur_block {
let mut if_true_args = if_true_args
.iter()
.map(|&val| Operand::value(val))
.collect();
let mut if_false_args = if_false_args
.iter()
.map(|&val| Operand::value(val))
.collect();
let mut if_true_args = if_true_args.to_vec();
let mut if_false_args = if_false_args.to_vec();
self.fill_block_params_with_locals(if_true, &mut if_true_args);
self.fill_block_params_with_locals(if_false, &mut if_false_args);
self.body.blocks[block].terminator = Terminator::CondBr {
cond: Operand::value(cond),
cond,
if_true: BlockTarget {
block: if_true,
args: if_true_args,
@ -850,13 +836,13 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
fn emit_br_table(
&mut self,
index: ValueId,
index: Value,
default_target: BlockId,
indexed_targets: &[BlockId],
args: &[ValueId],
args: &[Value],
) {
if let Some(block) = self.cur_block {
let args: Vec<Operand> = args.iter().map(|&arg| Operand::value(arg)).collect();
let args = args.to_vec();
let targets = indexed_targets
.iter()
.map(|&block| {
@ -873,16 +859,16 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
args: default_args,
};
self.body.blocks[block].terminator = Terminator::Select {
value: Operand::value(index),
value: index,
targets,
default,
};
}
}
fn emit_ret(&mut self, vals: &[ValueId]) {
fn emit_ret(&mut self, values: &[Value]) {
if let Some(block) = self.cur_block {
let values = vals.iter().map(|&value| Operand::value(value)).collect();
let values = values.to_vec();
self.body.blocks[block].terminator = Terminator::Return { values };
}
}
@ -899,12 +885,8 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
let mut block_param_num = 0;
for &ty in wasm_stack_val_tys.iter() {
let value_id = self.body.values.len() as ValueId;
self.body.values.push(ValueDef {
kind: ValueKind::BlockParam(block, block_param_num),
ty,
});
self.op_stack.push((ty, value_id));
let value = Value::blockparam(block, block_param_num);
self.op_stack.push((ty, value));
block_param_num += 1;
}
@ -913,13 +895,9 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
.iter()
.zip(block_param_locals.iter())
{
let value_id = self.body.values.len() as ValueId;
self.body.values.push(ValueDef {
kind: ValueKind::BlockParam(block, block_param_num),
ty,
});
let value = Value::blockparam(block, block_param_num);
block_param_num += 1;
self.locals.insert(local_id, (ty, value_id));
self.locals.insert(local_id, (ty, value));
}
}
}
@ -941,30 +919,24 @@ impl<'a, 'b> FunctionBodyBuilder<'a, 'b> {
for input in inputs.into_iter().rev() {
let (stack_top_ty, stack_top) = self.op_stack.pop().unwrap();
assert_eq!(stack_top_ty, input);
input_operands.push(Operand::value(stack_top));
input_operands.push(stack_top);
}
input_operands.reverse();
let mut output_operands = vec![];
let n_outputs = outputs.len();
for (i, output_ty) in outputs.into_iter().enumerate() {
let val = self.body.values.len() as ValueId;
output_operands.push(val);
self.body.values.push(ValueDef {
kind: ValueKind::Inst(block, inst, i),
ty: output_ty,
});
self.op_stack.push((output_ty, val));
self.op_stack.push((output_ty, Value::inst(block, inst, i)));
}
self.body.blocks[block].insts.push(Inst {
operator: op,
outputs: output_operands,
n_outputs,
inputs: input_operands,
});
} else {
let _ = self.pop_n(inputs.len());
for ty in outputs {
self.op_stack.push((ty, NO_VALUE));
self.op_stack.push((ty, Value::undef()));
}
}
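
Stepping back from the hunks above: `fill_block_params_with_locals` is the piece that lets branches carry locals as block arguments. Below is a standalone sketch of the argument-filling half of that function as it appears in the hunks; the lines that fall between the hunks are not modeled, and the types here are simplified stand-ins, not the crate's definitions:

```rust
use std::collections::HashMap;

type LocalId = u32;
type BlockId = usize;
type Value = u64; // stand-in for the packed `Value` introduced in src/ir.rs

fn fill_block_params_with_locals(
    target: BlockId,
    locals: &HashMap<LocalId, Value>,
    block_param_locals: &mut HashMap<BlockId, Vec<LocalId>>,
    args: &mut Vec<Value>,
) {
    // On the first branch to `target`, fix an ordering of the live locals.
    if !block_param_locals.contains_key(&target) {
        let mut keys: Vec<LocalId> = locals.keys().cloned().collect();
        keys.sort();
        block_param_locals.insert(target, keys);
    }
    // Every branch to `target` then passes the locals' current values,
    // in that fixed order, as extra block arguments.
    for local in &block_param_locals[&target] {
        args.push(locals[local]);
    }
}
```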

src/ir.rs

@ -2,16 +2,15 @@
use crate::{backend::Shape, cfg::CFGInfo, frontend};
use anyhow::Result;
use fxhash::FxHashMap;
use wasmparser::{FuncType, Operator, Type};
pub type SignatureId = usize;
pub type FuncId = usize;
pub type BlockId = usize;
pub type InstId = usize;
pub type ValueId = usize;
pub type LocalId = u32;
pub const NO_VALUE: ValueId = usize::MAX;
pub const INVALID_BLOCK: BlockId = usize::MAX;
#[derive(Clone, Debug, Default)]
@ -40,26 +39,15 @@ impl<'a> FuncDecl<'a> {
#[derive(Clone, Debug, Default)]
pub struct FunctionBody<'a> {
pub arg_values: Vec<Value>,
pub locals: Vec<Type>,
pub blocks: Vec<Block<'a>>,
pub values: Vec<ValueDef>,
}
#[derive(Clone, Debug)]
pub struct ValueDef {
pub kind: ValueKind,
pub ty: Type,
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ValueKind {
Arg(usize),
BlockParam(BlockId, usize),
Inst(BlockId, InstId, usize),
pub types: FxHashMap<Value, Type>,
}
#[derive(Clone, Debug, Default)]
pub struct Block<'a> {
pub id: BlockId,
pub params: Vec<Type>,
pub insts: Vec<Inst<'a>>,
pub terminator: Terminator,
@ -70,14 +58,18 @@ impl<'a> Block<'a> {
self.terminator.successors()
}
pub fn values<'b>(&'b self) -> impl Iterator<Item = ValueId> + 'b {
pub fn values<'b>(&'b self) -> impl Iterator<Item = Value> + 'b {
let block = self.id;
self.insts
.iter()
.map(|inst| inst.outputs.iter().cloned())
.enumerate()
.map(move |(inst_id, inst)| {
(0..inst.n_outputs).map(move |i| Value::inst(block, inst_id, i))
})
.flatten()
}
pub fn visit_operands<F: Fn(&Operand)>(&self, f: F) {
pub fn visit_values<F: Fn(&Value)>(&self, f: F) {
for inst in &self.insts {
for input in &inst.inputs {
f(input);
@ -95,7 +87,7 @@ impl<'a> Block<'a> {
}
}
pub fn update_operands<F: Fn(&mut Operand)>(&mut self, f: F) {
pub fn update_values<F: Fn(&mut Value)>(&mut self, f: F) {
for inst in &mut self.insts {
for input in &mut inst.inputs {
f(input);
@ -117,33 +109,123 @@ impl<'a> Block<'a> {
#[derive(Clone, Debug)]
pub struct Inst<'a> {
pub operator: Operator<'a>,
pub outputs: Vec<ValueId>,
pub inputs: Vec<Operand>,
pub n_outputs: usize,
pub inputs: Vec<Value>,
}
#[derive(Clone, Copy, Debug)]
pub enum Operand {
/// An SSA value.
Value(ValueId),
/// Undef values are produced when code is unreachable and thus
/// removed/never executed.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Value(u64);
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[repr(u64)]
enum ValueTag {
/// Undefined value. Fields: tag(4) unused(60).
Undef = 0,
/// Function argument. Fields: tag(4) unused(52) index(8).
Arg = 1,
/// Block param. Fields: tag(4) unused(2) block(26) param(32).
BlockParam = 2,
/// Instruction output. Fields: tag(4) block(26) inst(26) output(8).
InstOutput = 3,
}
const VALUE_TAG_SHIFT: usize = 60;
impl Value {
pub fn undef() -> Self {
Value((ValueTag::Undef as u64) << VALUE_TAG_SHIFT)
}
pub fn arg(index: usize) -> Self {
assert!(index < 256);
Value(((ValueTag::Arg as u64) << VALUE_TAG_SHIFT) | (index as u64))
}
pub fn blockparam(block: BlockId, index: usize) -> Self {
assert!(index < 256);
assert!(block < (1 << 26));
Value(
((ValueTag::BlockParam as u64) << VALUE_TAG_SHIFT)
| ((block as u64) << 32)
| (index as u64),
)
}
pub fn inst(block: BlockId, inst: InstId, index: usize) -> Self {
assert!(index < 256);
assert!(block < (1 << 26));
assert!(inst < (1 << 26));
Value(
((ValueTag::InstOutput as u64) << VALUE_TAG_SHIFT)
| ((block as u64) << 34)
| ((inst as u64) << 8)
| (index as u64),
)
}
pub fn unpack(self) -> ValueKind {
let tag = self.0 >> VALUE_TAG_SHIFT;
match tag {
0 => ValueKind::Undef,
1 => ValueKind::Arg((self.0 & ((1 << 8) - 1)) as usize),
2 => ValueKind::BlockParam(
((self.0 >> 32) & ((1 << 26) - 1)) as usize,
(self.0 & 0xff) as usize,
),
3 => ValueKind::Inst(
((self.0 >> 34) & ((1 << 26) - 1)) as usize,
((self.0 >> 8) & ((1 << 26) - 1)) as usize,
(self.0 & 0xff) as usize,
),
_ => unreachable!(),
}
}
pub fn as_arg(self) -> Option<usize> {
match self.unpack() {
ValueKind::Arg(arg) => Some(arg),
_ => None,
}
}
pub fn as_blockparam(self) -> Option<(BlockId, usize)> {
match self.unpack() {
ValueKind::BlockParam(block, param) => Some((block, param)),
_ => None,
}
}
pub fn as_inst(self) -> Option<(BlockId, InstId, usize)> {
match self.unpack() {
ValueKind::Inst(block, inst, param) => Some((block, inst, param)),
_ => None,
}
}
}
impl std::fmt::Display for Value {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
match self.unpack() {
ValueKind::Undef => write!(f, "undef"),
ValueKind::Arg(i) => write!(f, "arg{}", i),
ValueKind::BlockParam(block, i) => write!(f, "block{}_{}", block, i),
ValueKind::Inst(block, inst, i) => write!(f, "inst{}_{}_{}", block, inst, i),
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ValueKind {
Undef,
}
impl Operand {
pub fn value(value: ValueId) -> Self {
if value == NO_VALUE {
Operand::Undef
} else {
Operand::Value(value)
}
}
Arg(usize),
BlockParam(BlockId, usize),
Inst(BlockId, InstId, usize),
}
#[derive(Clone, Debug)]
pub struct BlockTarget {
pub block: BlockId,
pub args: Vec<Operand>,
pub args: Vec<Value>,
}
#[derive(Clone, Debug)]
@ -152,17 +234,17 @@ pub enum Terminator {
target: BlockTarget,
},
CondBr {
cond: Operand,
cond: Value,
if_true: BlockTarget,
if_false: BlockTarget,
},
Select {
value: Operand,
value: Value,
targets: Vec<BlockTarget>,
default: BlockTarget,
},
Return {
values: Vec<Operand>,
values: Vec<Value>,
},
None,
}
@ -174,7 +256,7 @@ impl std::default::Default for Terminator {
}
impl Terminator {
pub fn args(&self) -> Vec<Operand> {
pub fn args(&self) -> Vec<Value> {
match self {
Terminator::Br { target } => target.args.clone(),
Terminator::CondBr {
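
As a sanity check on the bit layout documented on `ValueTag` above, here is a standalone sketch that reproduces the `InstOutput` packing and unpacking. The constants and field widths mirror the diff; the free functions are local stand-ins, not the crate's API:

```rust
// InstOutput layout from the ValueTag comments:
// tag(4) | block(26) | inst(26) | output(8), with the tag in the top bits.
const VALUE_TAG_SHIFT: u64 = 60;
const TAG_INST_OUTPUT: u64 = 3;

fn pack_inst(block: u64, inst: u64, output: u64) -> u64 {
    assert!(block < (1 << 26) && inst < (1 << 26) && output < 256);
    (TAG_INST_OUTPUT << VALUE_TAG_SHIFT) | (block << 34) | (inst << 8) | output
}

fn unpack_inst(v: u64) -> (u64, u64, u64) {
    assert_eq!(v >> VALUE_TAG_SHIFT, TAG_INST_OUTPUT);
    (
        (v >> 34) & ((1 << 26) - 1), // block
        (v >> 8) & ((1 << 26) - 1),  // inst
        v & 0xff,                    // output index
    )
}

fn main() {
    let v = pack_inst(5, 17, 1);
    assert_eq!(unpack_inst(v), (5, 17, 1));
}
```

The practical effect of this encoding is that a `Value` identifies its own defining site, so the old side table of `ValueDef`s disappears and only the type map (`types: FxHashMap<Value, Type>`) remains.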


@ -9,7 +9,6 @@ pub use wasmparser;
mod backend;
mod cfg;
mod dataflow;
mod frontend;
mod ir;
mod op_traits;


@ -1,6 +1,6 @@
//! Metadata on operators.
use crate::ir::{Module, SignatureId, ValueId};
use crate::ir::{Module, SignatureId, Value};
use anyhow::{bail, Result};
use wasmparser::{Operator, Type};
@ -8,7 +8,7 @@ pub fn op_inputs(
module: &Module,
my_sig: SignatureId,
my_locals: &[Type],
op_stack: &[(Type, ValueId)],
op_stack: &[(Type, Value)],
op: &Operator<'_>,
) -> Result<Vec<Type>> {
match op {
@ -228,7 +228,7 @@ pub fn op_inputs(
pub fn op_outputs(
module: &Module,
my_locals: &[Type],
op_stack: &[(Type, ValueId)],
op_stack: &[(Type, Value)],
op: &Operator<'_>,
) -> Result<Vec<Type>> {
match op {

src/pass/dataflow.rs (new file)

@ -0,0 +1,180 @@
//! Iterative dataflow analysis (forward and backward) using lattice
//! analysis values.
use crate::cfg::CFGInfo;
use crate::ir::*;
use crate::pass::Lattice;
use fxhash::{FxHashMap, FxHashSet};
use std::collections::hash_map::Entry as HashEntry;
use std::{collections::VecDeque, default::Default};
use wasmparser::Type;
impl<'a> FunctionBody<'a> {
fn insts(&self) -> impl Iterator<Item = &Inst<'a>> {
self.blocks.iter().map(|block| block.insts.iter()).flatten()
}
}
pub trait DataflowFunctions<L: Lattice> {
fn start_block(&self, _lattice: &mut L, _block: BlockId, _param_types: &[Type]) -> bool {
false
}
fn end_block(
&self,
_lattice: &mut L,
_block: BlockId,
_next: BlockId,
_terminator: &Terminator,
) -> bool {
false
}
fn instruction<'a>(
&self,
_lattice: &mut L,
_block: BlockId,
_instid: InstId,
_inst: &Inst<'a>,
) -> bool {
false
}
}
#[derive(Clone, Debug)]
pub struct ForwardDataflow<L: Lattice> {
block_in: FxHashMap<BlockId, L>,
}
impl<L: Lattice> ForwardDataflow<L> {
pub fn new<'a, D: DataflowFunctions<L>>(f: &FunctionBody<'a>, d: &D) -> Self {
let mut analysis = Self {
block_in: FxHashMap::default(),
};
analysis.compute(f, d);
analysis
}
fn compute<'a, D: DataflowFunctions<L>>(&mut self, f: &FunctionBody<'a>, d: &D) {
let mut workqueue = VecDeque::new();
let mut workqueue_set = FxHashSet::default();
workqueue.push_back(0);
workqueue_set.insert(0);
while let Some(block) = workqueue.pop_front() {
workqueue_set.remove(&block);
let mut value = self
.block_in
.entry(block)
.or_insert_with(|| L::top())
.clone();
d.start_block(&mut value, block, &f.blocks[block].params[..]);
for (instid, inst) in f.blocks[block].insts.iter().enumerate() {
d.instruction(&mut value, block, instid, inst);
}
let succs = f.blocks[block].terminator.successors();
for (i, &succ) in succs.iter().enumerate() {
let mut value = if i + 1 < succs.len() {
value.clone()
} else {
std::mem::replace(&mut value, L::top())
};
d.end_block(&mut value, block, succ, &f.blocks[block].terminator);
let (succ_in, mut changed) = match self.block_in.entry(succ) {
HashEntry::Vacant(v) => (v.insert(L::top()), true),
HashEntry::Occupied(o) => (o.into_mut(), false),
};
changed |= succ_in.meet_with(&value);
if changed && !workqueue_set.contains(&succ) {
workqueue.push_back(succ);
workqueue_set.insert(succ);
}
}
}
}
}
#[derive(Clone, Debug)]
pub struct BackwardDataflow<L: Lattice> {
block_out: FxHashMap<BlockId, L>,
}
impl<L: Lattice> BackwardDataflow<L> {
pub fn new<'a, D: DataflowFunctions<L>>(
f: &FunctionBody<'a>,
cfginfo: &CFGInfo,
d: &D,
) -> Self {
let mut analysis = Self {
block_out: FxHashMap::default(),
};
analysis.compute(f, cfginfo, d);
analysis
}
fn compute<'a, D: DataflowFunctions<L>>(
&mut self,
f: &FunctionBody<'a>,
cfginfo: &CFGInfo,
d: &D,
) {
let mut workqueue = VecDeque::new();
let mut workqueue_set = FxHashSet::default();
let returns = f
.blocks
.iter()
.enumerate()
.filter(|(_, block)| matches!(&block.terminator, &Terminator::Return { .. }))
.map(|(id, _)| id)
.collect::<Vec<BlockId>>();
for ret in returns {
workqueue.push_back(ret);
workqueue_set.insert(ret);
}
while let Some(block) = workqueue.pop_front() {
workqueue_set.remove(&block);
let mut value = self
.block_out
.entry(block)
.or_insert_with(|| L::top())
.clone();
for (instid, inst) in f.blocks[block].insts.iter().rev().enumerate() {
d.instruction(&mut value, block, instid, inst);
}
d.start_block(&mut value, block, &f.blocks[block].params[..]);
let preds = &cfginfo.block_preds[block];
for (i, pred) in preds.iter().cloned().enumerate() {
let mut value = if i + 1 < preds.len() {
value.clone()
} else {
std::mem::replace(&mut value, L::top())
};
d.end_block(&mut value, pred, block, &f.blocks[pred].terminator);
let (pred_out, mut changed) = match self.block_out.entry(pred) {
HashEntry::Vacant(v) => (v.insert(L::top()), true),
HashEntry::Occupied(o) => (o.into_mut(), false),
};
changed |= pred_out.meet_with(&value);
if changed && !workqueue_set.contains(&pred) {
workqueue.push_back(pred);
workqueue_set.insert(pred);
}
}
}
}
}
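
For orientation, a hypothetical client of the new framework: a may-analysis that implements `DataflowFunctions` over the `UnionBitSet` lattice defined in `src/pass/lattice.rs` below and runs the forward solver. The crate name (`waffle` here) and the module visibility/re-export paths are assumptions; only `instruction` is overridden, since the other hooks default to no-ops:

```rust
use waffle::ir::{BlockId, FunctionBody, Inst, InstId};
use waffle::pass::{DataflowFunctions, ForwardDataflow, UnionBitSet};

/// Tracks which instruction indices may have executed on some path to a
/// block (union meet => may-analysis). Indices here are per-block positions,
/// used purely for illustration.
struct MayHaveExecuted;

impl DataflowFunctions<UnionBitSet> for MayHaveExecuted {
    fn instruction<'a>(
        &self,
        lattice: &mut UnionBitSet,
        _block: BlockId,
        inst_id: InstId,
        _inst: &Inst<'a>,
    ) -> bool {
        // Add this instruction's index to the "may have executed" set and
        // report that the lattice value changed.
        lattice.add(inst_id);
        true
    }
}

fn analyze(body: &FunctionBody) -> ForwardDataflow<UnionBitSet> {
    ForwardDataflow::new(body, &MayHaveExecuted)
}
```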

src/pass/lattice.rs (new file)

@ -0,0 +1,139 @@
//! Lattice trait definition and some common implementations.
use crate::ir::*;
use regalloc2::indexset::IndexSet;
use std::fmt::Debug;
/// A lattice type used for an analysis.
///
/// The `meet` operator must compute the greatest lower bound for its
/// operands (that is, its result must be "less than or equal to" its
/// operands, according to the lattice's partial order, and must be
/// the greatest value that satisfies this condition). It must obey
/// the usual lattice laws:
///
/// * a `meet` a == a (idempotence)
/// * a `meet` b == b `meet` a (commutativity)
/// * a `meet` (b `meet` c) == (a `meet` b) `meet` c (associativity)
/// * a `meet` top == a
/// * a `meet` bottom == bottom
///
/// Note that while we require that the lattice is a consistent
/// partial order, we don't actually require the user to implement
/// `PartialOrd` on the type, because we never make direct ordering
/// comparisons when we perform a dataflow analysis. Instead the
/// ordering is only implicitly depended upon, in order to ensure that
/// the analysis terminates. For this to be true, we also require that
/// the lattice has only a finite chain length -- that is, there must
/// not be an infinite descending chain in the lattice (equivalently,
/// repeatedly moving to "lesser" values always reaches bottom in
/// finitely many steps).
pub trait Lattice: Clone + Debug {
/// Return the `top` lattice value.
fn top() -> Self;
/// Return the `bottom` lattice value.
fn bottom() -> Self;
/// Mutate self to `meet(self, other)`. Returns `true` if any
/// changes occurred.
fn meet_with(&mut self, other: &Self) -> bool;
}
/// An analysis-value lattice whose values are sets of integer
/// indices. `top` is empty and `bottom` is the universe set; the
/// `meet` function is a union. This is useful for may-analyses,
/// i.e. when an analysis computes whether a property *may* be true
/// about a value in some case.
#[derive(Clone, Debug)]
pub struct UnionBitSet {
set: IndexSet,
/// The set has degenerated to contain "the universe" (all
/// possible values).
universe: bool,
}
impl Lattice for UnionBitSet {
fn top() -> Self {
UnionBitSet {
set: IndexSet::new(),
universe: false,
}
}
fn bottom() -> Self {
UnionBitSet {
set: IndexSet::new(),
universe: true,
}
}
fn meet_with(&mut self, other: &UnionBitSet) -> bool {
if !self.universe && other.universe {
self.universe = true;
return true;
}
self.set.union_with(&other.set)
}
}
impl UnionBitSet {
pub fn contains(&self, index: usize) -> bool {
self.universe || self.set.get(index)
}
pub fn add(&mut self, index: usize) {
if !self.universe {
self.set.set(index, true);
}
}
pub fn remove(&mut self, index: usize) {
if !self.universe {
self.set.set(index, false);
}
}
}
/// An analysis-value lattice whose values are sets of integer
/// indices. `top` is the universe set and `bottom` is the empty set;
/// the `meet` function is an intersection. This is useful for
/// must-analyses, i.e. when an analysis computes whether a property
/// *must* be true about a value in all cases.
#[derive(Clone, Debug)]
pub struct IntersectionBitSet {
/// We store the dual to the actual set, i.e., elements that are
/// *not* included.
not_set: UnionBitSet,
}
impl Lattice for IntersectionBitSet {
fn top() -> Self {
// `top` here is the universe-set; the dual of this set is the
// empty-set, which is UnionBitSet's `top()`.
Self {
not_set: UnionBitSet::top(),
}
}
fn bottom() -> Self {
Self {
not_set: UnionBitSet::bottom(),
}
}
fn meet_with(&mut self, other: &IntersectionBitSet) -> bool {
self.not_set.meet_with(&other.not_set)
}
}
impl IntersectionBitSet {
pub fn contains(&self, index: usize) -> bool {
!self.not_set.contains(index)
}
pub fn add(&mut self, index: usize) {
self.not_set.remove(index);
}
pub fn remove(&mut self, index: usize) {
self.not_set.add(index);
}
}
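
Beyond the two bitset lattices, the `Lattice` trait can be implemented directly for custom domains. A hypothetical flat constant lattice, written as if it lived alongside the trait in this file (it is not part of this commit), shows how `meet_with` encodes the laws listed above:

```rust
/// Hypothetical example: a flat lattice over a single u32 constant.
/// Top = "no information yet"; Bottom = "conflicting constants seen".
#[derive(Clone, Debug, PartialEq)]
enum ConstLattice {
    Top,
    Const(u32),
    Bottom,
}

impl Lattice for ConstLattice {
    fn top() -> Self {
        ConstLattice::Top
    }
    fn bottom() -> Self {
        ConstLattice::Bottom
    }
    fn meet_with(&mut self, other: &Self) -> bool {
        use ConstLattice::*;
        // Greatest lower bound: Top is the identity, differing constants
        // collapse to Bottom, equal constants are preserved.
        let met = match (&*self, other) {
            (Top, x) | (x, Top) => x.clone(),
            (Const(a), Const(b)) if a == b => Const(*a),
            _ => Bottom,
        };
        let changed = met != *self;
        *self = met;
        changed
    }
}
```

The chain Top > Const(_) > Bottom has length three, so the finite-chain-length requirement above is satisfied and any analysis over this lattice terminates.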

src/pass/mod.rs (new file)

@ -0,0 +1,11 @@
//! Pass framework: skeletons for common kinds of passes over code.
//!
//! Terminology note: a "pass" is a readonly analysis of a function
//! body. It does not mutate code; it only traverses the code in a
//! certain order, possibly multiple times (to converge), in order to
//! compute some derived information.
pub mod dataflow;
pub use dataflow::*;
pub mod lattice;
pub use lattice::*;