WIP.

parent 7b804b02d7
commit d954fa9fe6

@@ -1,5 +1,35 @@
//! Backend: IR to Wasm.

pub mod binaryen;
pub mod lower;
use crate::entity::{EntityRef, PerEntity};
use crate::ir::{Block, FunctionBody, Value, ValueDef};
use anyhow::Result;

pub mod stackify;
use stackify::{Mark, Marks, RPOIndex, RPO};
pub mod treeify;
use treeify::Trees;

pub struct WasmBackend<'a> {
    body: &'a FunctionBody,
    rpo: RPO,
    marks: Marks,
    trees: Trees,
}

impl<'a> WasmBackend<'a> {
    pub fn new(body: &'a FunctionBody) -> Result<WasmBackend<'a>> {
        let rpo = RPO::compute(body);
        let marks = Marks::compute(body, &rpo)?;
        let trees = Trees::compute(body);
        Ok(WasmBackend {
            body,
            rpo,
            marks,
            trees,
        })
    }

    pub fn compile(&self) -> Result<Vec<u8>> {
        Ok(vec![])
    }
}
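For orientation, a minimal usage sketch (not part of the diff) of the API added above. It assumes waffle's `FunctionBody` and the `WasmBackend` type from this file are in scope, and mirrors the per-function loop added to `Module::to_wasm_bytes` later in this commit; `compile` is still a stub, so the returned buffer is empty.

```rust
// Hypothetical driver for the new backend; only `WasmBackend::new`,
// `compile`, and `FunctionBody` come from the diff above.
fn compile_one(body: &FunctionBody) -> anyhow::Result<Vec<u8>> {
    // `new` precomputes the RPO order, loop/block marks, and operand trees.
    let backend = WasmBackend::new(body)?;
    // WIP: currently returns an empty byte buffer.
    backend.compile()
}
```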

src/backend/stackify.rs (new file, 251 lines)

@@ -0,0 +1,251 @@
//! Stackify implementation to produce structured control flow from an
//! arbitrary CFG.
//!
//! Note on algorithm:
//!
//! - We sort in RPO, then mark loops, then place blocks within loops
//!   or at top level to give forward edges appropriate targets.
//!
//! - The RPO sort order we choose is quite special: we need loop
//!   bodies to be placed contiguously, without blocks that do not
//!   belong to the loop in the middle. Otherwise we may not be able
//!   to properly nest a block to allow a forward edge.
//!
//! Consider the following CFG:
//!
//! ```plain
//!      1
//!      |
//!      2 <--.
//!     / |   |
//!    |  3 --'
//!    |  |
//!    `> 4
//!       |
//!       5
//! ```
//!
//! A normal RPO sort may produce 1, 2, 4, 5, 3 or 1, 2, 3, 4, 5
//! depending on which child order it chooses from block 2. (If it
//! visits 3 first, it will emit it first in postorder hence it comes
//! last.)
//!
//! One way of ensuring we get the right order would be to compute the
//! loop nest and make note of loops when choosing children to visit,
//! but we really would rather not do that, since we don't otherwise
//! have the infrastructure to compute that or the need for it.
//!
//! Instead, we keep a "pending" list: as we have nodes on the stack
//! during postorder traversal, we keep a list of other children that
//! we will visit once we get back to a given level. If another node
//! is pending, and is a successor we are considering, we visit it
//! *first* in postorder, so it is last in RPO. This is a way to
//! ensure that (e.g.) block 4 above is visited first when considering
//! successors of block 2.

use crate::declare_entity;
use crate::entity::{EntityRef, EntityVec, PerEntity};
use crate::ir::{Block, FunctionBody};
use std::collections::{HashMap, HashSet};

declare_entity!(RPOIndex, "rpo");

impl RPOIndex {
    pub fn prev(self) -> RPOIndex {
        RPOIndex(self.0.checked_sub(1).unwrap())
    }
}

pub struct RPO {
    pub order: EntityVec<RPOIndex, Block>,
    pub rev: PerEntity<Block, RPOIndex>,
}

impl RPO {
    pub fn compute(body: &FunctionBody) -> RPO {
        let mut postorder = vec![];
        let mut visited = HashSet::new();
        let mut pending = vec![];
        let mut pending_idx = HashMap::new();
        visited.insert(body.entry);
        Self::visit(
            body,
            body.entry,
            &mut visited,
            &mut pending,
            &mut pending_idx,
            &mut postorder,
        );
        postorder.reverse();

        let mut rev = PerEntity::default();
        for (i, block) in postorder.iter().copied().enumerate() {
            rev[block] = RPOIndex(i as u32);
        }

        RPO {
            order: EntityVec::from(postorder),
            rev,
        }
    }

    fn visit(
        body: &FunctionBody,
        block: Block,
        visited: &mut HashSet<Block>,
        pending: &mut Vec<Block>,
        pending_idx: &mut HashMap<Block, usize>,
        postorder: &mut Vec<Block>,
    ) {
        // `pending` is a Vec, not a Set; we prioritize based on
        // position (first in pending go first in postorder -> last in
        // RPO). A case with nested loops to show why this matters:
        //
        // TODO example

        let pending_top = pending.len();
        pending.extend(body.blocks[block].succs.clone());

        // Sort new entries in `pending` by index at which they appear
        // earlier. Those that don't appear in `pending` at all should
        // be visited last (to appear in RPO first), so we want `None`
        // values to sort first here (hence the "unwrap or MAX"
        // idiom). Then those that appear earlier in `pending` should
        // be visited earlier here to appear later in RPO, so they
        // sort later.
        pending[pending_top..]
            .sort_by_key(|entry| pending_idx.get(entry).copied().unwrap_or(usize::MAX));

        // Above we placed items in order they are to be visited;
        // below we pop off the end, so we reverse here.
        pending[pending_top..].reverse();

        // Now update indices in `pending_idx`: insert entries for
        // those blocks not yet present.
        for i in pending_top..pending.len() {
            pending_idx.entry(pending[i]).or_insert(i);
        }

        for _ in 0..(pending.len() - pending_top) {
            let succ = pending.pop().unwrap();
            if pending_idx.get(&succ) == Some(&pending.len()) {
                pending_idx.remove(&succ);
            }

            if visited.insert(succ) {
                Self::visit(body, succ, visited, pending, pending_idx, postorder);
            }
        }
        postorder.push(block);
    }
}

/// Start and end marks for loops.
#[derive(Debug)]
pub struct Marks(HashMap<RPOIndex, Vec<Mark>>);

// Sorting-order note: Loop comes second, so Blocks sort first with
// smaller regions first. Thus, *reverse* sort order places loops
// outermost then larger blocks before smaller blocks.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Mark {
    Block { last_inclusive: RPOIndex },
    Loop { last_inclusive: RPOIndex },
}

impl Marks {
    pub fn compute(body: &FunctionBody, rpo: &RPO) -> anyhow::Result<Marks> {
        let mut marks = HashMap::new();

        // Pass 1: Place loop markers.
        let mut loop_end: HashMap<RPOIndex, RPOIndex> = HashMap::new();
        for (rpo_block, &block) in rpo.order.entries() {
            for &succ in &body.blocks[block].succs {
                let rpo_succ = rpo.rev[succ];
                assert!(rpo_succ.is_valid());
                if rpo_succ <= rpo_block {
                    let end = loop_end.entry(rpo_succ).or_insert(RPOIndex::invalid());
                    if end.is_invalid() {
                        *end = rpo_block;
                    } else {
                        // Already-existing loop header. Adjust `end`.
                        *end = std::cmp::max(*end, rpo_block);
                    }
                }
            }
        }

        // Pass 2: properly nest loops by extending the reach of outer
        // loops to fully contain inner loops.
        for rpo_block in rpo.order.iter().rev() {
            if let Some(rpo_loop_end) = loop_end.get(&rpo_block).copied() {
                let mut updated_end = rpo_loop_end;
                for body_block in rpo_block.index()..=rpo_loop_end.index() {
                    let body_block = RPOIndex::new(body_block);
                    if let Some(inner_end) = loop_end.get(&body_block).copied() {
                        updated_end = std::cmp::max(updated_end, inner_end);
                    }
                }
                if updated_end != rpo_loop_end {
                    loop_end.insert(rpo_block, updated_end);
                }
            }
        }

        // Pass 3: compute location of innermost loop for each
        // block.
        let mut innermost_loop: PerEntity<RPOIndex, Option<RPOIndex>> = PerEntity::default();
        let mut loop_stack: Vec<(RPOIndex, RPOIndex)> = vec![];
        for rpo_block in rpo.order.iter() {
            while let Some(innermost) = loop_stack.last() {
                if innermost.1 >= rpo_block {
                    break;
                }
                loop_stack.pop();
            }

            if let Some(rpo_loop_end) = loop_end.get(&rpo_block).copied() {
                loop_stack.push((rpo_block, rpo_loop_end));
            }

            innermost_loop[rpo_block] = loop_stack.last().map(|lp| lp.0);
        }

        // Copy loop-start markers over.
        for (lp, end) in loop_end {
            marks.insert(
                lp,
                vec![Mark::Loop {
                    last_inclusive: end,
                }],
            );
        }

        // Pass 4: place block markers.
        for (rpo_block, &block) in rpo.order.entries() {
            for &succ in &body.blocks[block].succs {
                let rpo_succ = rpo.rev[succ];
                assert!(rpo_succ.is_valid());
                if rpo_succ > rpo_block {
                    // Determine the innermost loop for the target,
                    // and add the block just inside the loop.
                    let block_start = innermost_loop[rpo_succ].unwrap_or(RPOIndex(0));
                    let start_marks = marks.entry(block_start).or_insert_with(|| vec![]);
                    let mark = Mark::Block {
                        last_inclusive: rpo_succ.prev(),
                    };
                    start_marks.push(mark);
                }
            }
        }

        // Sort markers at each block.
        for marklist in marks.values_mut() {
            marklist.sort();
            marklist.dedup();
            marklist.reverse();
        }

        Ok(Marks(marks))
    }
}
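To make the ordering note above concrete, here is a self-contained sketch (not part of the commit) that mirrors `RPO::visit` over plain adjacency lists instead of waffle's `FunctionBody`; block numbering follows the doc-comment CFG, with an unused block 0 so IDs line up. Running it yields the loop-friendly order 1, 2, 3, 4, 5.

```rust
use std::collections::{HashMap, HashSet};

// Standalone illustration of the pending-list RPO; block IDs are plain usize.
fn rpo(succs: &[Vec<usize>], entry: usize) -> Vec<usize> {
    let mut postorder = vec![];
    let mut visited = HashSet::new();
    let mut pending = vec![];
    let mut pending_idx = HashMap::new();
    visited.insert(entry);
    visit(succs, entry, &mut visited, &mut pending, &mut pending_idx, &mut postorder);
    postorder.reverse();
    postorder
}

fn visit(
    succs: &[Vec<usize>],
    block: usize,
    visited: &mut HashSet<usize>,
    pending: &mut Vec<usize>,
    pending_idx: &mut HashMap<usize, usize>,
    postorder: &mut Vec<usize>,
) {
    let pending_top = pending.len();
    pending.extend(succs[block].iter().copied());
    // Successors already pending elsewhere are visited first, so they land last in RPO.
    pending[pending_top..].sort_by_key(|b| pending_idx.get(b).copied().unwrap_or(usize::MAX));
    pending[pending_top..].reverse();
    for i in pending_top..pending.len() {
        pending_idx.entry(pending[i]).or_insert(i);
    }
    for _ in 0..(pending.len() - pending_top) {
        let succ = pending.pop().unwrap();
        if pending_idx.get(&succ) == Some(&pending.len()) {
            pending_idx.remove(&succ);
        }
        if visited.insert(succ) {
            visit(succs, succ, visited, pending, pending_idx, postorder);
        }
    }
    postorder.push(block);
}

fn main() {
    // CFG from the module doc comment: 1->2, 2->{4,3}, 3->{2,4}, 4->5.
    // Block 0 is unused padding so IDs match the drawing.
    let succs = vec![vec![], vec![2], vec![4, 3], vec![2, 4], vec![5], vec![]];
    assert_eq!(rpo(&succs, 1), vec![1, 2, 3, 4, 5]);
}
```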

src/backend/treeify.rs (new file, 85 lines)

@@ -0,0 +1,85 @@
//! Treeification: placing some values "under" others if only used
//! once, to generate more AST-like Wasm code.

use crate::ir::{FunctionBody, Value, ValueDef};
use crate::Operator;
use std::collections::{HashMap, HashSet};
use std::convert::TryFrom;

/// One "argument slot" of an operator defining a value.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct ValueArg(Value, u16);

pub struct Trees {
    /// Is a value placed "under" the given arg slot of the given
    /// other value?
    pub owner: HashMap<Value, ValueArg>,
    /// For a given value that is defined by an operator, which
    /// Values, if any, live at each slot?
    pub owned: HashMap<ValueArg, Value>,
}

impl Trees {
    pub fn compute(body: &FunctionBody) -> Trees {
        let mut owner = HashMap::new();
        let mut owned = HashMap::new();
        let mut multi_use = HashSet::new();

        for (value, def) in body.values.entries() {
            match def {
                &ValueDef::Operator(_, ref args, ref tys) => {
                    // For each of the args, if the value is produced
                    // by a single-output op and is movable, and is
                    // not already recorded in `multi_use`, place it
                    // in the arg slot. Otherwise if owned already
                    // somewhere else, undo that and put in
                    // `multi_use`.
                    for (i, &arg) in args.iter().enumerate() {
                        let arg = body.resolve_alias(arg);
                        if multi_use.contains(&arg) {
                            continue;
                        } else if let Some(old_owner) = owner.remove(&arg) {
                            owned.remove(&old_owner);
                            multi_use.insert(arg);
                        } else if Self::is_movable(body, arg) {
                            let pos = u16::try_from(i).unwrap();
                            let value_arg = ValueArg(value, pos);
                            owner.insert(arg, value_arg);
                            owned.insert(value_arg, arg);
                        }
                    }
                }
                &ValueDef::PickOutput(..) => {
                    // Can ignore use: multi-arity values are never treeified.
                }
                &ValueDef::BlockParam(..)
                | &ValueDef::Alias(..)
                | &ValueDef::Placeholder(..)
                | &ValueDef::None => {}
            }
        }
        for block in body.blocks.values() {
            block.terminator.visit_uses(|u| {
                let u = body.resolve_alias(u);
                if let Some(old_owner) = owner.remove(&u) {
                    owned.remove(&old_owner);
                }
            });
        }

        Trees { owner, owned }
    }

    fn is_single_output_op(body: &FunctionBody, value: Value) -> Option<Operator> {
        match &body.values[value] {
            &ValueDef::Operator(op, _, ref tys) if tys.len() == 1 => Some(op),
            _ => None,
        }
    }

    fn is_movable(body: &FunctionBody, value: Value) -> bool {
        Self::is_single_output_op(body, value)
            .map(|op| op.is_pure())
            .unwrap_or(false)
    }
}
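As a rough illustration of the ownership rule above (a single-use, movable value is folded under its unique consumer; a second use evicts it), here is a self-contained sketch over plain integer value IDs. The `movable` flag stands in for the single-output purity check; the names are illustrative only, not waffle's API.

```rust
use std::collections::{HashMap, HashSet};

// (value id of the consumer, argument slot index)
type Slot = (usize, u16);

// defs[v] = argument value IDs used by value v's defining operator.
// movable[v] = whether v is a pure, single-output op (simplified stand-in).
fn treeify(defs: &[Vec<usize>], movable: &[bool]) -> (HashMap<usize, Slot>, HashMap<Slot, usize>) {
    let mut owner: HashMap<usize, Slot> = HashMap::new();
    let mut owned: HashMap<Slot, usize> = HashMap::new();
    let mut multi_use: HashSet<usize> = HashSet::new();
    for (value, args) in defs.iter().enumerate() {
        for (i, &arg) in args.iter().enumerate() {
            if multi_use.contains(&arg) {
                continue;
            } else if let Some(old) = owner.remove(&arg) {
                // Second use seen: evict the earlier placement.
                owned.remove(&old);
                multi_use.insert(arg);
            } else if movable[arg] {
                let slot = (value, i as u16);
                owner.insert(arg, slot);
                owned.insert(slot, arg);
            }
        }
    }
    (owner, owned)
}

fn main() {
    // v0, v1: constants; v2 = add(v0, v1); v3 = mul(v2, v0).
    // v1 and v2 are each used once and fold under their consumers; v0 is used twice.
    let defs = vec![vec![], vec![], vec![0, 1], vec![2, 0]];
    let movable = vec![true, true, true, true];
    let (owner, _owned) = treeify(&defs, &movable);
    assert_eq!(owner.get(&1), Some(&(2, 1))); // v1 sits in slot 1 of v2
    assert_eq!(owner.get(&2), Some(&(3, 0))); // v2 sits in slot 0 of v3
    assert_eq!(owner.get(&0), None);          // v0 used twice -> kept separate
}
```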

@@ -16,6 +16,7 @@ pub trait EntityRef: Clone + Copy + PartialEq + Eq + PartialOrd + Ord + Hash {
    fn is_invalid(self) -> bool {
        self == Self::invalid()
    }
    fn maybe_index(self) -> Option<usize>;
}

#[macro_export]

@@ -32,8 +33,16 @@ macro_rules! declare_entity {
            Self(value)
        }
        fn index(self) -> usize {
            debug_assert!(self.is_valid());
            self.0 as usize
        }
        fn maybe_index(self) -> Option<usize> {
            if self.is_valid() {
                Some(self.0 as usize)
            } else {
                None
            }
        }
        fn invalid() -> Self {
            Self(u32::MAX)
        }
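A small standalone sketch (a demo index type, not the actual `declare_entity!` expansion) of how the `u32::MAX` sentinel and the new `maybe_index` accessor are meant to interact:

```rust
// Demo of the sentinel pattern: u32::MAX means "invalid", and
// maybe_index turns that into an Option instead of panicking.
#[derive(Clone, Copy, PartialEq, Eq)]
struct DemoIndex(u32);

impl DemoIndex {
    fn invalid() -> Self { DemoIndex(u32::MAX) }
    fn is_valid(self) -> bool { self != Self::invalid() }
    fn maybe_index(self) -> Option<usize> {
        if self.is_valid() { Some(self.0 as usize) } else { None }
    }
}

fn main() {
    assert_eq!(DemoIndex(3).maybe_index(), Some(3));
    // Entities that were never assigned keep the invalid sentinel, so lookups can be filtered:
    assert_eq!(DemoIndex::invalid().maybe_index(), None);
}
```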

@@ -226,8 +226,14 @@ impl<'a> Module<'a> {
    }

    pub fn to_wasm_bytes(&self) -> Result<Vec<u8>> {
        let module = backend::lower::lower(self)?;
        module.write()
        for func_decl in self.funcs.values() {
            if let Some(body) = func_decl.body() {
                let comp = backend::WasmBackend::new(body)?;
                let _ = comp.compile()?;
            }
        }

        Ok(vec![])
    }

    pub fn display<'b>(&'b self) -> ModuleDisplay<'b>

src/op_traits.rs (408 lines)

@@ -448,219 +448,219 @@ pub enum SideEffect {
    Trap,
    ReadMem,
    WriteMem,
    ReadGlobal(Global),
    WriteGlobal(Global),
    ReadTable(Table),
    WriteTable(Table),
    ReadLocal(Local),
    WriteLocal(Local),
    ReadGlobal,
    WriteGlobal,
    ReadTable,
    WriteTable,
    ReadLocal,
    WriteLocal,
    Return,
    All,
}

pub fn op_effects(op: &Operator) -> Cow<'static, [SideEffect]> {
    use SideEffect::*;
impl Operator {
    pub fn effects(&self) -> &'static [SideEffect] {
        use SideEffect::*;

    match op {
        &Operator::Unreachable => Cow::Borrowed(&[Trap]),
        &Operator::Nop => Cow::Borrowed(&[]),
        match self {
            &Operator::Unreachable => &[Trap],
            &Operator::Nop => &[],

        &Operator::Call { .. } => Cow::Borrowed(&[All]),
        &Operator::CallIndirect { .. } => Cow::Borrowed(&[All]),
        &Operator::Return => Cow::Borrowed(&[Return]),
        &Operator::LocalSet { local_index, .. } => vec![WriteLocal(local_index)].into(),
        &Operator::LocalGet { local_index, .. } => vec![ReadLocal(local_index)].into(),
        &Operator::LocalTee { local_index, .. } => {
            vec![ReadLocal(local_index), WriteLocal(local_index)].into()
            &Operator::Call { .. } => &[All],
            &Operator::CallIndirect { .. } => &[All],
            &Operator::Return => &[Return],
            &Operator::LocalSet { .. } => &[WriteLocal],
            &Operator::LocalGet { .. } => &[ReadLocal],
            &Operator::LocalTee { .. } => &[ReadLocal, WriteLocal],

            &Operator::Select => &[],
            &Operator::TypedSelect { .. } => &[],
            &Operator::GlobalGet { .. } => &[ReadGlobal],
            &Operator::GlobalSet { .. } => &[WriteGlobal],

            Operator::I32Load { .. }
            | Operator::I32Load8S { .. }
            | Operator::I32Load8U { .. }
            | Operator::I32Load16S { .. }
            | Operator::I32Load16U { .. }
            | Operator::I64Load { .. }
            | Operator::I64Load8S { .. }
            | Operator::I64Load8U { .. }
            | Operator::I64Load16S { .. }
            | Operator::I64Load16U { .. }
            | Operator::I64Load32S { .. }
            | Operator::I64Load32U { .. }
            | Operator::F32Load { .. }
            | Operator::F64Load { .. } => &[Trap, ReadMem],

            Operator::I32Store { .. }
            | Operator::I64Store { .. }
            | Operator::F32Store { .. }
            | Operator::F64Store { .. }
            | Operator::I32Store8 { .. }
            | Operator::I32Store16 { .. }
            | Operator::I64Store8 { .. }
            | Operator::I64Store16 { .. }
            | Operator::I64Store32 { .. } => &[Trap, WriteMem],

            Operator::I32Const { .. }
            | Operator::I64Const { .. }
            | Operator::F32Const { .. }
            | Operator::F64Const { .. } => &[],

            Operator::I32Eqz
            | Operator::I32Eq
            | Operator::I32Ne
            | Operator::I32LtS
            | Operator::I32LtU
            | Operator::I32GtS
            | Operator::I32GtU
            | Operator::I32LeS
            | Operator::I32LeU
            | Operator::I32GeS
            | Operator::I32GeU
            | Operator::I64Eqz
            | Operator::I64Eq
            | Operator::I64Ne
            | Operator::I64LtS
            | Operator::I64LtU
            | Operator::I64GtU
            | Operator::I64GtS
            | Operator::I64LeS
            | Operator::I64LeU
            | Operator::I64GeS
            | Operator::I64GeU
            | Operator::F32Eq
            | Operator::F32Ne
            | Operator::F32Lt
            | Operator::F32Gt
            | Operator::F32Le
            | Operator::F32Ge
            | Operator::F64Eq
            | Operator::F64Ne
            | Operator::F64Lt
            | Operator::F64Gt
            | Operator::F64Le
            | Operator::F64Ge => &[],

            Operator::I32Clz
            | Operator::I32Ctz
            | Operator::I32Popcnt
            | Operator::I32Add
            | Operator::I32Sub
            | Operator::I32Mul
            | Operator::I32And
            | Operator::I32Or
            | Operator::I32Xor
            | Operator::I32Shl
            | Operator::I32ShrS
            | Operator::I32ShrU
            | Operator::I32Rotl
            | Operator::I32Rotr => &[],

            Operator::I32DivS | Operator::I32DivU | Operator::I32RemS | Operator::I32RemU => {
                &[Trap]
            }

            Operator::I64Clz
            | Operator::I64Ctz
            | Operator::I64Popcnt
            | Operator::I64Add
            | Operator::I64Sub
            | Operator::I64Mul
            | Operator::I64And
            | Operator::I64Or
            | Operator::I64Xor
            | Operator::I64Shl
            | Operator::I64ShrS
            | Operator::I64ShrU
            | Operator::I64Rotl
            | Operator::I64Rotr => &[],

            Operator::I64DivS | Operator::I64DivU | Operator::I64RemS | Operator::I64RemU => {
                &[Trap]
            }

            Operator::F32Abs
            | Operator::F32Neg
            | Operator::F32Ceil
            | Operator::F32Floor
            | Operator::F32Trunc
            | Operator::F32Nearest
            | Operator::F32Sqrt
            | Operator::F32Add
            | Operator::F32Sub
            | Operator::F32Mul
            | Operator::F32Div
            | Operator::F32Min
            | Operator::F32Max
            | Operator::F32Copysign => &[],

            Operator::F64Abs
            | Operator::F64Neg
            | Operator::F64Ceil
            | Operator::F64Floor
            | Operator::F64Trunc
            | Operator::F64Nearest
            | Operator::F64Sqrt
            | Operator::F64Add
            | Operator::F64Sub
            | Operator::F64Mul
            | Operator::F64Div
            | Operator::F64Min
            | Operator::F64Max
            | Operator::F64Copysign => &[],

            Operator::I32WrapI64 => &[],
            Operator::I32TruncF32S => &[Trap],
            Operator::I32TruncF32U => &[Trap],
            Operator::I32TruncF64S => &[Trap],
            Operator::I32TruncF64U => &[Trap],
            Operator::I64ExtendI32S => &[],
            Operator::I64ExtendI32U => &[],
            Operator::I64TruncF32S => &[Trap],
            Operator::I64TruncF32U => &[Trap],
            Operator::I64TruncF64S => &[Trap],
            Operator::I64TruncF64U => &[Trap],
            Operator::F32ConvertI32S => &[],
            Operator::F32ConvertI32U => &[],
            Operator::F32ConvertI64S => &[],
            Operator::F32ConvertI64U => &[],
            Operator::F32DemoteF64 => &[],
            Operator::F64ConvertI32S => &[],
            Operator::F64ConvertI32U => &[],
            Operator::F64ConvertI64S => &[],
            Operator::F64ConvertI64U => &[],
            Operator::F64PromoteF32 => &[],
            Operator::I32Extend8S => &[],
            Operator::I32Extend16S => &[],
            Operator::I64Extend8S => &[],
            Operator::I64Extend16S => &[],
            Operator::I64Extend32S => &[],
            Operator::I32TruncSatF32S => &[],
            Operator::I32TruncSatF32U => &[],
            Operator::I32TruncSatF64S => &[],
            Operator::I32TruncSatF64U => &[],
            Operator::I64TruncSatF32S => &[],
            Operator::I64TruncSatF32U => &[],
            Operator::I64TruncSatF64S => &[],
            Operator::I64TruncSatF64U => &[],
            Operator::F32ReinterpretI32 => &[],
            Operator::F64ReinterpretI64 => &[],
            Operator::I32ReinterpretF32 => &[],
            Operator::I64ReinterpretF64 => &[],
            Operator::TableGet { .. } => &[ReadTable, Trap],
            Operator::TableSet { .. } => &[WriteTable, Trap],
            Operator::TableGrow { .. } => &[WriteTable, Trap],
            Operator::TableSize { .. } => &[ReadTable],
            Operator::MemorySize { .. } => &[ReadMem],
            Operator::MemoryGrow { .. } => &[WriteMem, Trap],
        }

        &Operator::Select => Cow::Borrowed(&[]),
        &Operator::TypedSelect { .. } => Cow::Borrowed(&[]),
        &Operator::GlobalGet { global_index, .. } => vec![ReadGlobal(global_index)].into(),
        &Operator::GlobalSet { global_index, .. } => vec![WriteGlobal(global_index)].into(),

        Operator::I32Load { .. }
        | Operator::I32Load8S { .. }
        | Operator::I32Load8U { .. }
        | Operator::I32Load16S { .. }
        | Operator::I32Load16U { .. }
        | Operator::I64Load { .. }
        | Operator::I64Load8S { .. }
        | Operator::I64Load8U { .. }
        | Operator::I64Load16S { .. }
        | Operator::I64Load16U { .. }
        | Operator::I64Load32S { .. }
        | Operator::I64Load32U { .. }
        | Operator::F32Load { .. }
        | Operator::F64Load { .. } => Cow::Borrowed(&[Trap, ReadMem]),

        Operator::I32Store { .. }
        | Operator::I64Store { .. }
        | Operator::F32Store { .. }
        | Operator::F64Store { .. }
        | Operator::I32Store8 { .. }
        | Operator::I32Store16 { .. }
        | Operator::I64Store8 { .. }
        | Operator::I64Store16 { .. }
        | Operator::I64Store32 { .. } => Cow::Borrowed(&[Trap, WriteMem]),

        Operator::I32Const { .. }
        | Operator::I64Const { .. }
        | Operator::F32Const { .. }
        | Operator::F64Const { .. } => Cow::Borrowed(&[]),

        Operator::I32Eqz
        | Operator::I32Eq
        | Operator::I32Ne
        | Operator::I32LtS
        | Operator::I32LtU
        | Operator::I32GtS
        | Operator::I32GtU
        | Operator::I32LeS
        | Operator::I32LeU
        | Operator::I32GeS
        | Operator::I32GeU
        | Operator::I64Eqz
        | Operator::I64Eq
        | Operator::I64Ne
        | Operator::I64LtS
        | Operator::I64LtU
        | Operator::I64GtU
        | Operator::I64GtS
        | Operator::I64LeS
        | Operator::I64LeU
        | Operator::I64GeS
        | Operator::I64GeU
        | Operator::F32Eq
        | Operator::F32Ne
        | Operator::F32Lt
        | Operator::F32Gt
        | Operator::F32Le
        | Operator::F32Ge
        | Operator::F64Eq
        | Operator::F64Ne
        | Operator::F64Lt
        | Operator::F64Gt
        | Operator::F64Le
        | Operator::F64Ge => Cow::Borrowed(&[]),

        Operator::I32Clz
        | Operator::I32Ctz
        | Operator::I32Popcnt
        | Operator::I32Add
        | Operator::I32Sub
        | Operator::I32Mul
        | Operator::I32And
        | Operator::I32Or
        | Operator::I32Xor
        | Operator::I32Shl
        | Operator::I32ShrS
        | Operator::I32ShrU
        | Operator::I32Rotl
        | Operator::I32Rotr => Cow::Borrowed(&[]),

        Operator::I32DivS | Operator::I32DivU | Operator::I32RemS | Operator::I32RemU => {
            Cow::Borrowed(&[Trap])
        }

        Operator::I64Clz
        | Operator::I64Ctz
        | Operator::I64Popcnt
        | Operator::I64Add
        | Operator::I64Sub
        | Operator::I64Mul
        | Operator::I64And
        | Operator::I64Or
        | Operator::I64Xor
        | Operator::I64Shl
        | Operator::I64ShrS
        | Operator::I64ShrU
        | Operator::I64Rotl
        | Operator::I64Rotr => Cow::Borrowed(&[]),

        Operator::I64DivS | Operator::I64DivU | Operator::I64RemS | Operator::I64RemU => {
            Cow::Borrowed(&[Trap])
        }

        Operator::F32Abs
        | Operator::F32Neg
        | Operator::F32Ceil
        | Operator::F32Floor
        | Operator::F32Trunc
        | Operator::F32Nearest
        | Operator::F32Sqrt
        | Operator::F32Add
        | Operator::F32Sub
        | Operator::F32Mul
        | Operator::F32Div
        | Operator::F32Min
        | Operator::F32Max
        | Operator::F32Copysign => Cow::Borrowed(&[]),

        Operator::F64Abs
        | Operator::F64Neg
        | Operator::F64Ceil
        | Operator::F64Floor
        | Operator::F64Trunc
        | Operator::F64Nearest
        | Operator::F64Sqrt
        | Operator::F64Add
        | Operator::F64Sub
        | Operator::F64Mul
        | Operator::F64Div
        | Operator::F64Min
        | Operator::F64Max
        | Operator::F64Copysign => Cow::Borrowed(&[]),

        Operator::I32WrapI64 => Cow::Borrowed(&[]),
        Operator::I32TruncF32S => Cow::Borrowed(&[Trap]),
        Operator::I32TruncF32U => Cow::Borrowed(&[Trap]),
        Operator::I32TruncF64S => Cow::Borrowed(&[Trap]),
        Operator::I32TruncF64U => Cow::Borrowed(&[Trap]),
        Operator::I64ExtendI32S => Cow::Borrowed(&[]),
        Operator::I64ExtendI32U => Cow::Borrowed(&[]),
        Operator::I64TruncF32S => Cow::Borrowed(&[Trap]),
        Operator::I64TruncF32U => Cow::Borrowed(&[Trap]),
        Operator::I64TruncF64S => Cow::Borrowed(&[Trap]),
        Operator::I64TruncF64U => Cow::Borrowed(&[Trap]),
        Operator::F32ConvertI32S => Cow::Borrowed(&[]),
        Operator::F32ConvertI32U => Cow::Borrowed(&[]),
        Operator::F32ConvertI64S => Cow::Borrowed(&[]),
        Operator::F32ConvertI64U => Cow::Borrowed(&[]),
        Operator::F32DemoteF64 => Cow::Borrowed(&[]),
        Operator::F64ConvertI32S => Cow::Borrowed(&[]),
        Operator::F64ConvertI32U => Cow::Borrowed(&[]),
        Operator::F64ConvertI64S => Cow::Borrowed(&[]),
        Operator::F64ConvertI64U => Cow::Borrowed(&[]),
        Operator::F64PromoteF32 => Cow::Borrowed(&[]),
        Operator::I32Extend8S => Cow::Borrowed(&[]),
        Operator::I32Extend16S => Cow::Borrowed(&[]),
        Operator::I64Extend8S => Cow::Borrowed(&[]),
        Operator::I64Extend16S => Cow::Borrowed(&[]),
        Operator::I64Extend32S => Cow::Borrowed(&[]),
        Operator::I32TruncSatF32S => Cow::Borrowed(&[]),
        Operator::I32TruncSatF32U => Cow::Borrowed(&[]),
        Operator::I32TruncSatF64S => Cow::Borrowed(&[]),
        Operator::I32TruncSatF64U => Cow::Borrowed(&[]),
        Operator::I64TruncSatF32S => Cow::Borrowed(&[]),
        Operator::I64TruncSatF32U => Cow::Borrowed(&[]),
        Operator::I64TruncSatF64S => Cow::Borrowed(&[]),
        Operator::I64TruncSatF64U => Cow::Borrowed(&[]),
        Operator::F32ReinterpretI32 => Cow::Borrowed(&[]),
        Operator::F64ReinterpretI64 => Cow::Borrowed(&[]),
        Operator::I32ReinterpretF32 => Cow::Borrowed(&[]),
        Operator::I64ReinterpretF64 => Cow::Borrowed(&[]),
        Operator::TableGet { table_index, .. } => vec![ReadTable(*table_index), Trap].into(),
        Operator::TableSet { table_index, .. } => vec![WriteTable(*table_index), Trap].into(),
        Operator::TableGrow { table_index, .. } => vec![WriteTable(*table_index), Trap].into(),
        Operator::TableSize { table_index, .. } => vec![ReadTable(*table_index)].into(),
        Operator::MemorySize { .. } => Cow::Borrowed(&[ReadMem]),
        Operator::MemoryGrow { .. } => Cow::Borrowed(&[WriteMem, Trap]),
    }
}

pub fn is_pure(op: &Operator) -> bool {
    op_effects(op).is_empty()
    pub fn is_pure(&self) -> bool {
        self.effects().is_empty()
    }
}

impl std::fmt::Display for Operator {

@@ -2,7 +2,6 @@

use crate::cfg::CFGInfo;
use crate::ir::*;
use crate::op_traits::is_pure;
use crate::passes::dom_pass::{dom_pass, DomtreePass};
use crate::scoped_map::ScopedMap;
use crate::Operator;

@@ -29,7 +28,7 @@ impl DomtreePass for GVNPass {

fn value_is_pure(value: Value, body: &FunctionBody) -> bool {
    match body.values[value] {
        ValueDef::Operator(op, ..) if is_pure(&op) => true,
        ValueDef::Operator(op, ..) if op.is_pure() => true,
        _ => false,
    }
}
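For context, a hedged sketch (not from the commit) of why the purity query matters to a GVN-style pass: only operators whose effect list is empty may be deduplicated by an (operator, args) key. `Op`, `Effect`, and `gvn` here are illustrative stand-ins, not waffle's API.

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
enum Op { Add, Load, DivTrapping }

#[derive(Clone, Copy)]
enum Effect { ReadMem, Trap }

impl Op {
    // Mirrors the shape of Operator::effects(): a static effect list per operator.
    fn effects(self) -> &'static [Effect] {
        match self {
            Op::Add => &[],
            Op::Load => &[Effect::ReadMem],
            Op::DivTrapping => &[Effect::Trap],
        }
    }
    fn is_pure(self) -> bool {
        self.effects().is_empty()
    }
}

// Value-number an instruction stream: reuse a prior value only for pure ops.
fn gvn(insts: &[(Op, Vec<u32>)]) -> Vec<u32> {
    let mut table: HashMap<(Op, Vec<u32>), u32> = HashMap::new();
    let mut out = vec![];
    for (i, (op, args)) in insts.iter().enumerate() {
        if op.is_pure() {
            let vn = *table.entry((*op, args.clone())).or_insert(i as u32);
            out.push(vn);
        } else {
            out.push(i as u32); // effectful ops always get a fresh number
        }
    }
    out
}

fn main() {
    let insts = vec![
        (Op::Add, vec![0, 1]),
        (Op::Add, vec![0, 1]), // pure duplicate -> shares value number 0
        (Op::Load, vec![0]),
        (Op::Load, vec![0]),   // may observe different memory -> kept distinct
    ];
    assert_eq!(gvn(&insts), vec![0, 0, 2, 3]);
}
```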

@@ -131,15 +131,17 @@ impl RPO {
        postorder.push(block);
    }

    fn map_block(&self, block: Block) -> Block {
        Block::new(self.rev[block].unwrap().index())
    fn map_block(&self, block: Block) -> Option<Block> {
        Some(Block::new(self.rev[block]?.index()))
    }
}

pub fn run(body: &mut FunctionBody) {
    let rpo = RPO::compute(body);
    // Remap entry block.
    body.entry = rpo.map_block(body.entry);
    body.entry = rpo
        .map_block(body.entry)
        .expect("Entry block must be in RPO sequence");
    // Reorder blocks.
    let mut block_data = std::mem::take(&mut body.blocks).into_vec();
    let mut new_block_data = vec![];

@@ -150,13 +152,23 @@ pub fn run(body: &mut FunctionBody) {
    // Rewrite references in each terminator, pred and succ list.
    for block in body.blocks.values_mut() {
        block.terminator.update_targets(|target| {
            target.block = rpo.map_block(target.block);
            target.block = rpo
                .map_block(target.block)
                .expect("Target of reachable block must be reachable");
        });
        block.preds.retain_mut(|pred| {
            if let Some(new_pred) = rpo.map_block(*pred) {
                *pred = new_pred;
                true
            } else {
                // Some preds may be unreachable, so are not in RPO.
                false
            }
        });
        for pred in &mut block.preds {
            *pred = rpo.map_block(*pred);
        }
        for succ in &mut block.succs {
            *succ = rpo.map_block(*succ);
            *succ = rpo
                .map_block(*succ)
                .expect("Succ of reachable block must be reachable");
        }
    }
}
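A minimal standalone sketch of the pred-filtering behavior introduced here, with block IDs as plain `usize` and `remap` standing in for `map_block`: reachable predecessors are renumbered, unreachable ones are dropped instead of unwrapped.

```rust
// remap[b] = Some(new id) if block b is reachable (appears in RPO), else None.
fn remap_preds(preds: &mut Vec<usize>, remap: &[Option<usize>]) {
    preds.retain_mut(|pred| {
        if let Some(new_pred) = remap[*pred] {
            *pred = new_pred;
            true
        } else {
            // Unreachable predecessor: not in RPO, so drop the edge.
            false
        }
    });
}

fn main() {
    // Block 2 is unreachable (e.g. only targeted from dead code), so it has no RPO slot.
    let remap = vec![Some(0), Some(1), None, Some(2)];
    let mut preds = vec![1, 2, 3];
    remap_preds(&mut preds, &remap);
    assert_eq!(preds, vec![1, 2]); // pred 1 -> 1, pred 2 dropped, pred 3 -> 2
}
```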