From 00a73f42cba671edade89f3f55612e68e2ca0ec2 Mon Sep 17 00:00:00 2001
From: rarbore2 <rarbore2@illinois.edu>
Date: Wed, 28 Feb 2024 14:24:09 -0600
Subject: [PATCH] Pass manager + forkify
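
Add a PassManager to hercules_opt that owns an IR module, runs a list of
passes over it, and transparently computes and caches the analyses each
pass needs (def-use maps, reverse postorders, typing, control subgraphs,
dominators, postdominators, fork-join maps, and loop trees). Add a forkify
pass that converts natural loops with a simple induction variable into
fork-join pairs, and port hercules_cpu and hercules_dot to the new pass
manager. The new sum_sample.hir sample shows the loop shape forkify
currently recognizes.

A minimal driver, mirroring the updated hercules_dot, looks roughly like
this (the exact pass list is up to the caller):

    let mut pm = hercules_opt::pass::PassManager::new(module);
    pm.add_pass(hercules_opt::pass::Pass::Verify);
    pm.add_pass(hercules_opt::pass::Pass::Forkify);
    pm.add_pass(hercules_opt::pass::Pass::DCE);
    let module = pm.run_passes();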

---
 hercules_ir/src/def_use.rs              |  14 +-
 hercules_ir/src/ir.rs                   | 134 ++++++++----
 hercules_ir/src/loops.rs                |  10 +-
 hercules_ir/src/typecheck.rs            |   2 +-
 hercules_opt/src/forkify.rs             | 232 +++++++++++++++++++++
 hercules_opt/src/lib.rs                 |   4 +
 hercules_opt/src/pass.rs                | 266 ++++++++++++++++++++++++
 hercules_samples/ccp_example.hir        |  10 +-
 hercules_samples/sum_sample.hir         |  16 ++
 hercules_tools/hercules_cpu/src/main.rs |  32 +--
 hercules_tools/hercules_dot/src/main.rs |  34 +--
 11 files changed, 654 insertions(+), 100 deletions(-)
 create mode 100644 hercules_opt/src/forkify.rs
 create mode 100644 hercules_opt/src/pass.rs
 create mode 100644 hercules_samples/sum_sample.hir

diff --git a/hercules_ir/src/def_use.rs b/hercules_ir/src/def_use.rs
index 0116bb4e..73516ab5 100644
--- a/hercules_ir/src/def_use.rs
+++ b/hercules_ir/src/def_use.rs
@@ -1,8 +1,7 @@
 use crate::*;
 
 /*
- * Custom type for an immutable def_use map. This is a relatively efficient
- * storage of def_use edges, requiring 2 heap allocations.
+ * Custom type for an immutable def-use map.
  */
 #[derive(Debug, Clone)]
 pub struct ImmutableDefUseMap {
@@ -99,6 +98,17 @@ pub enum NodeUsesMut<'a> {
     Variable(Box<[&'a mut NodeID]>),
 }
 
+impl<'a> NodeUsesMut<'a> {
+    pub fn map(&mut self, old: NodeID, new: NodeID) {
+        let uses = self.as_mut();
+        for mut_ref in uses.into_iter() {
+            if **mut_ref == old {
+                **mut_ref = new;
+            }
+        }
+    }
+}
+
 impl<'a> AsRef<[NodeID]> for NodeUses<'a> {
     fn as_ref(&self) -> &[NodeID] {
         match self {
diff --git a/hercules_ir/src/ir.rs b/hercules_ir/src/ir.rs
index e2c8c39b..63d88313 100644
--- a/hercules_ir/src/ir.rs
+++ b/hercules_ir/src/ir.rs
@@ -254,46 +254,6 @@ pub enum TernaryOperator {
 }
 
 impl Module {
-    /*
-     * There are many transformations that need to iterate over the functions
-     * in a module, while having mutable access to the interned types,
-     * constants, and dynamic constants in a module. This code is really ugly,
-     * so write it once.
-     */
-    pub fn map<F>(self, mut func: F) -> Self
-    where
-        F: FnMut(
-            (Function, FunctionID),
-            (Vec<Type>, Vec<Constant>, Vec<DynamicConstant>),
-        ) -> (Function, (Vec<Type>, Vec<Constant>, Vec<DynamicConstant>)),
-    {
-        let Module {
-            functions,
-            types,
-            constants,
-            dynamic_constants,
-        } = self;
-        let mut stuff = (types, constants, dynamic_constants);
-        let functions = functions
-            .into_iter()
-            .enumerate()
-            .map(|(idx, function)| {
-                let mut new_stuff = (vec![], vec![], vec![]);
-                std::mem::swap(&mut stuff, &mut new_stuff);
-                let (function, mut new_stuff) = func((function, FunctionID::new(idx)), new_stuff);
-                std::mem::swap(&mut stuff, &mut new_stuff);
-                function
-            })
-            .collect();
-        let (types, constants, dynamic_constants) = stuff;
-        Module {
-            functions,
-            types,
-            constants,
-            dynamic_constants,
-        }
-    }
-
     /*
      * Printing out types, constants, and dynamic constants fully requires a
      * reference to the module, since references to other types, constants, and
@@ -565,9 +525,11 @@ impl Function {
     /*
      * Many transformations will delete nodes. There isn't strictly a gravestone
      * node value, so use the start node as a gravestone value (for IDs other
-     * than 0). This function cleans up gravestoned nodes.
+     * than 0). This function cleans up gravestoned nodes. It returns a map
+     * from old node IDs to new node IDs, so that other data structures can be
+     * updated.
      */
-    pub fn delete_gravestones(&mut self) {
+    pub fn delete_gravestones(&mut self) -> Vec<NodeID> {
         // Step 1: figure out which nodes are gravestones.
         let mut gravestones = (0..self.nodes.len())
             .filter(|x| *x != 0 && self.nodes[*x].is_start())
@@ -603,7 +565,7 @@ impl Function {
                 let old_id = **u;
                 let new_id = node_mapping[old_id.idx()];
                 if new_id == NodeID::new(0) && old_id != NodeID::new(0) {
-                    panic!("While deleting gravestones, came across a use of a gravestoned node.");
+                    panic!("While deleting gravestones, came across a use of a gravestoned node. The user has ID {} and was using {}.", idx, old_id.idx());
                 }
                 **u = new_id;
             }
@@ -613,6 +575,29 @@ impl Function {
         }
 
         std::mem::swap(&mut new_nodes, &mut self.nodes);
+
+        node_mapping
+    }
+}
+
+/*
+ * Some analysis results can be updated after gravestone deletions.
+ */
+pub trait GraveUpdatable {
+    fn map_gravestones(&self, grave_mapping: &Vec<NodeID>) -> Self;
+}
+
+impl<T: Clone> GraveUpdatable for Vec<T> {
+    fn map_gravestones(&self, grave_mapping: &Vec<NodeID>) -> Self {
+        let mut new_self = vec![];
+        for (data, (idx, mapping)) in
+            std::iter::zip(self.into_iter(), grave_mapping.iter().enumerate())
+        {
+            // Keep entries only for nodes that survive gravestone deletion.
+            if idx == 0 || mapping.idx() != 0 {
+                new_self.push(data.clone());
+            }
+        }
+        new_self
     }
 }
 
@@ -849,6 +834,69 @@ impl Node {
     );
     define_pattern_predicate!(is_match, Node::Match { control: _, sum: _ });
 
+    pub fn try_if(&self) -> Option<(NodeID, NodeID)> {
+        if let Node::If { control, cond } = self {
+            Some((*control, *cond))
+        } else {
+            None
+        }
+    }
+
+    pub fn try_phi(&self) -> Option<(NodeID, &[NodeID])> {
+        if let Node::Phi { control, data } = self {
+            Some((*control, data))
+        } else {
+            None
+        }
+    }
+
+    pub fn try_constant(&self) -> Option<ConstantID> {
+        if let Node::Constant { id } = self {
+            Some(*id)
+        } else {
+            None
+        }
+    }
+
+    pub fn try_dynamic_constant(&self) -> Option<DynamicConstantID> {
+        if let Node::DynamicConstant { id } = self {
+            Some(*id)
+        } else {
+            None
+        }
+    }
+
+    pub fn try_binary(&self, bop: BinaryOperator) -> Option<(NodeID, NodeID)> {
+        if let Node::Binary { left, right, op } = self
+            && *op == bop
+        {
+            Some((*left, *right))
+        } else {
+            None
+        }
+    }
+
+    pub fn try_control_read(&self, branch: usize) -> Option<NodeID> {
+        if let Node::Read { collect, indices } = self
+            && indices.len() == 1
+            && indices[0] == Index::Control(branch)
+        {
+            Some(*collect)
+        } else {
+            None
+        }
+    }
+
+    pub fn is_zero_constant(&self, constants: &Vec<Constant>) -> bool {
+        if let Node::Constant { id } = self
+            && constants[id.idx()].is_zero()
+        {
+            true
+        } else {
+            false
+        }
+    }
+
     /*
      * Read nodes can be considered control when following an if or match
      * node. However, it is sometimes useful to exclude such nodes when
diff --git a/hercules_ir/src/loops.rs b/hercules_ir/src/loops.rs
index cc302998..f966c0ad 100644
--- a/hercules_ir/src/loops.rs
+++ b/hercules_ir/src/loops.rs
@@ -1,5 +1,6 @@
 extern crate bitvec;
 
+use std::collections::hash_map;
 use std::collections::HashMap;
 
 use self::bitvec::prelude::*;
@@ -14,7 +15,10 @@ use crate::*;
  * join pairs, and so on. Each node in the loop tree has a representative
  * "header" node. For normal loops, this is the region node branched to by a
  * dominated if node. For fork join pairs, this is the fork node. A loop is a
- * top-level loop if its parent is the root node of the subgraph.
+ * top-level loop if its parent is the root node of the subgraph. Each node in
+ * the tree is an entry in the loops HashMap - the key is the "header" node for
+ * the loop, and the value is a pair of the set of control nodes inside the loop
+ * and this loop's parent header.
  */
 #[derive(Debug, Clone)]
 pub struct LoopTree {
@@ -26,6 +30,10 @@ impl LoopTree {
     pub fn contains(&self, x: NodeID) -> bool {
         x == self.root || self.loops.contains_key(&x)
     }
+
+    pub fn loops(&self) -> hash_map::Iter<'_, NodeID, (BitVec<u8, Lsb0>, NodeID)> {
+        self.loops.iter()
+    }
 }
 
 /*
diff --git a/hercules_ir/src/typecheck.rs b/hercules_ir/src/typecheck.rs
index bcd3a152..3529c47b 100644
--- a/hercules_ir/src/typecheck.rs
+++ b/hercules_ir/src/typecheck.rs
@@ -49,7 +49,7 @@ impl Semilattice for TypeSemilattice {
                     Concrete(*id1)
                 } else {
                     // Error will only allocate when a type error has occurred.
-                    // In that case, we're less concerned about speed to the
+                    // In that case, we're less concerned about speed of the
                     // compiler, and more allocations are acceptable.
                     Error(format!(
                         "Couldn't reconcile two different concrete types, with IDs {} and {}.",
diff --git a/hercules_opt/src/forkify.rs b/hercules_opt/src/forkify.rs
new file mode 100644
index 00000000..5b0920c5
--- /dev/null
+++ b/hercules_opt/src/forkify.rs
@@ -0,0 +1,232 @@
+extern crate hercules_ir;
+
+use std::iter::zip;
+
+use self::hercules_ir::def_use::*;
+use self::hercules_ir::ir::*;
+use self::hercules_ir::loops::*;
+
+/*
+ * Top level function to convert natural loops with simple induction variables
+ * into fork-joins.
+ */
+pub fn forkify(
+    function: &mut Function,
+    constants: &Vec<Constant>,
+    dynamic_constants: &mut Vec<DynamicConstant>,
+    def_use: &ImmutableDefUseMap,
+    loops: &LoopTree,
+) {
+    // Ignore loops that are already fork-joins.
+    let natural_loops = loops
+        .loops()
+        .filter(|(k, _)| function.nodes[k.idx()].is_region());
+
+    // Detect loops that have a simple loop induction variable. TODO: proper
+    // affine analysis to recognize other cases of linear induction variables.
+    let affine_loops: Vec<_> = natural_loops
+        .into_iter()
+        .filter_map(|(header, (contents, _))| {
+            // Get the single loop-contained predecessor of the loop header.
+            let header_uses = get_uses(&function.nodes[header.idx()]);
+            let mut pred_loop = header_uses.as_ref().iter().filter(|id| contents[id.idx()]);
+            let single_pred_loop = pred_loop.next()?;
+            if pred_loop.next().is_some() || header_uses.as_ref().len() != 2 {
+                return None;
+            }
+
+            // Check for a very particular loop indexing structure.
+            let if_ctrl = function.nodes[single_pred_loop.idx()].try_control_read(1)?;
+            let (_, if_cond) = function.nodes[if_ctrl.idx()].try_if()?;
+            let (idx, bound) = function.nodes[if_cond.idx()].try_binary(BinaryOperator::LT)?;
+            let (phi, one) = function.nodes[idx.idx()].try_binary(BinaryOperator::Add)?;
+            let (should_be_header, pred_datas) = function.nodes[phi.idx()].try_phi()?;
+            let one_c_id = function.nodes[one.idx()].try_constant()?;
+
+            if should_be_header != *header || !constants[one_c_id.idx()].is_one() {
+                return None;
+            }
+
+            // Check that the phi's input along the loop back edge is the add
+            // node, and that the phi's other inputs are zero constants.
+            zip(header_uses.as_ref().iter(), pred_datas.iter())
+                .position(|(c, d)| *c == *single_pred_loop && *d == idx)?;
+            if zip(header_uses.as_ref().iter(), pred_datas.iter())
+                .filter(|(c, d)| {
+                    (**c != *single_pred_loop)
+                        && !function.nodes[d.idx()].is_zero_constant(constants)
+                })
+                .count()
+                != 0
+            {
+                return None;
+            }
+
+            // Check for constant used as loop bound. Do this last, since we may
+            // create a new dynamic constant here.
+            let bound_dc_id =
+                if let Some(bound_dc_id) = function.nodes[bound.idx()].try_dynamic_constant() {
+                    bound_dc_id
+                } else if let Some(bound_c_id) = function.nodes[bound.idx()].try_constant() {
+                    // Create new dynamic constant that reflects this constant.
+                    let dc = match constants[bound_c_id.idx()] {
+                        Constant::Integer8(x) => DynamicConstant::Constant(x as _),
+                        Constant::Integer16(x) => DynamicConstant::Constant(x as _),
+                        Constant::Integer32(x) => DynamicConstant::Constant(x as _),
+                        Constant::Integer64(x) => DynamicConstant::Constant(x as _),
+                        Constant::UnsignedInteger8(x) => DynamicConstant::Constant(x as _),
+                        Constant::UnsignedInteger16(x) => DynamicConstant::Constant(x as _),
+                        Constant::UnsignedInteger32(x) => DynamicConstant::Constant(x as _),
+                        Constant::UnsignedInteger64(x) => DynamicConstant::Constant(x as _),
+                        _ => return None,
+                    };
+
+                    // The new dynamic constant may already be interned.
+                    let maybe_already_in = dynamic_constants
+                        .iter()
+                        .enumerate()
+                        .find(|(_, x)| **x == dc)
+                        .map(|(idx, _)| idx);
+                    if let Some(bound_dc_idx) = maybe_already_in {
+                        DynamicConstantID::new(bound_dc_idx)
+                    } else {
+                        let id = DynamicConstantID::new(dynamic_constants.len());
+                        dynamic_constants.push(dc);
+                        id
+                    }
+                } else {
+                    return None;
+                };
+
+            Some((header, phi, contents, bound_dc_id))
+        })
+        .collect();
+
+    // Convert affine loops into fork-joins.
+    for (header, idx_phi, contents, dc_id) in affine_loops {
+        let header_uses = get_uses(&function.nodes[header.idx()]);
+        let header_uses: Vec<_> = header_uses.as_ref().into_iter().map(|x| *x).collect();
+
+        // Get the control portions of the loop that need to be grafted.
+        let loop_pred = *header_uses
+            .iter()
+            .filter(|id| !contents[id.idx()])
+            .next()
+            .unwrap();
+        let loop_true_read = *header_uses
+            .iter()
+            .filter(|id| contents[id.idx()])
+            .next()
+            .unwrap();
+        let loop_end = function.nodes[loop_true_read.idx()]
+            .try_control_read(1)
+            .unwrap();
+        let loop_false_read = *def_use
+            .get_users(loop_end)
+            .iter()
+            .filter_map(|id| {
+                if function.nodes[id.idx()].try_control_read(0).is_some() {
+                    Some(id)
+                } else {
+                    None
+                }
+            })
+            .next()
+            .unwrap();
+        let loop_dst = def_use.get_users(loop_false_read)[0];
+
+        // Create fork and join nodes.
+        let fork = Node::Fork {
+            control: loop_pred,
+            factor: dc_id,
+        };
+        let fork_id = NodeID::new(function.nodes.len());
+        function.nodes.push(fork);
+
+        let join = Node::Join {
+            control: if *header == get_uses(&function.nodes[loop_end.idx()]).as_ref()[0] {
+                fork_id
+            } else {
+                function.nodes[loop_end.idx()].try_if().unwrap().0
+            },
+        };
+        let join_id = NodeID::new(function.nodes.len());
+        function.nodes.push(join);
+
+        // Reconnect control nodes.
+        get_uses_mut(&mut function.nodes[loop_dst.idx()]).map(loop_false_read, join_id);
+
+        // Convert reducing phi nodes to reduce nodes.
+        let reduction_phis: Vec<_> = def_use
+            .get_users(*header)
+            .iter()
+            .filter(|id| **id != idx_phi && function.nodes[id.idx()].is_phi())
+            .collect();
+        for reduction_phi in reduction_phis {
+            // Loop predecessor input to phi is the reduction initializer.
+            let init = *zip(
+                header_uses.iter(),
+                function.nodes[reduction_phi.idx()]
+                    .try_phi()
+                    .unwrap()
+                    .1
+                    .iter(),
+            )
+            .filter(|(c, _)| **c == loop_pred)
+            .next()
+            .unwrap()
+            .1;
+
+            // Loop back edge input to phi is the value reduced each iteration.
+            let reduct = *zip(
+                header_uses.iter(),
+                function.nodes[reduction_phi.idx()]
+                    .try_phi()
+                    .unwrap()
+                    .1
+                    .iter(),
+            )
+            .filter(|(c, _)| **c == loop_true_read)
+            .next()
+            .unwrap()
+            .1;
+
+            // Create reduction node.
+            let reduce = Node::Reduce {
+                control: join_id,
+                init,
+                reduct,
+            };
+            let reduce_id = NodeID::new(function.nodes.len());
+            function.nodes.push(reduce);
+
+            // Edit users of phis.
+            for user in def_use.get_users(*reduction_phi) {
+                get_uses_mut(&mut function.nodes[user.idx()]).map(*reduction_phi, reduce_id);
+            }
+
+            // Also redirect users of the reduct value to the new reduce node.
+            for user in def_use.get_users(reduct) {
+                get_uses_mut(&mut function.nodes[user.idx()]).map(reduct, reduce_id);
+            }
+
+            // Delete reducing phi.
+            function.nodes[reduction_phi.idx()] = Node::Start;
+        }
+
+        // Convert index phi node to thread ID node.
+        let thread_id = Node::ThreadID { control: fork_id };
+        let thread_id_id = NodeID::new(function.nodes.len());
+        function.nodes.push(thread_id);
+        for user in def_use.get_users(idx_phi) {
+            get_uses_mut(&mut function.nodes[user.idx()]).map(idx_phi, thread_id_id);
+        }
+        function.nodes[idx_phi.idx()] = Node::Start;
+
+        // Delete old loop control nodes.
+        function.nodes[header.idx()] = Node::Start;
+        function.nodes[loop_end.idx()] = Node::Start;
+        function.nodes[loop_true_read.idx()] = Node::Start;
+        function.nodes[loop_false_read.idx()] = Node::Start;
+    }
+}
diff --git a/hercules_opt/src/lib.rs b/hercules_opt/src/lib.rs
index 309c4e23..2cdf4c14 100644
--- a/hercules_opt/src/lib.rs
+++ b/hercules_opt/src/lib.rs
@@ -1,7 +1,11 @@
 pub mod ccp;
 pub mod dce;
+pub mod forkify;
 pub mod gvn;
+pub mod pass;
 
 pub use crate::ccp::*;
 pub use crate::dce::*;
+pub use crate::forkify::*;
 pub use crate::gvn::*;
+pub use crate::pass::*;
diff --git a/hercules_opt/src/pass.rs b/hercules_opt/src/pass.rs
new file mode 100644
index 00000000..713a9c4a
--- /dev/null
+++ b/hercules_opt/src/pass.rs
@@ -0,0 +1,266 @@
+extern crate hercules_ir;
+
+use std::collections::HashMap;
+use std::iter::zip;
+
+use self::hercules_ir::dataflow::*;
+use self::hercules_ir::def_use::*;
+use self::hercules_ir::dom::*;
+use self::hercules_ir::ir::*;
+use self::hercules_ir::loops::*;
+use self::hercules_ir::subgraph::*;
+use self::hercules_ir::typecheck::*;
+use self::hercules_ir::verify::*;
+
+use crate::*;
+
+/*
+ * Passes that can be run on a module.
+ */
+#[derive(Debug, Clone)]
+pub enum Pass {
+    DCE,
+    CCP,
+    GVN,
+    Forkify,
+    Verify,
+}
+
+/*
+ * Manages passes to be run on an IR module. Transparently handles analysis
+ * requirements for optimizations.
+ */
+#[derive(Debug, Clone)]
+pub struct PassManager {
+    module: Module,
+
+    // Passes to run.
+    passes: Vec<Pass>,
+
+    // Cached analysis results.
+    def_uses: Option<Vec<ImmutableDefUseMap>>,
+    reverse_postorders: Option<Vec<Vec<NodeID>>>,
+    typing: Option<ModuleTyping>,
+    control_subgraphs: Option<Vec<Subgraph>>,
+    doms: Option<Vec<DomTree>>,
+    postdoms: Option<Vec<DomTree>>,
+    fork_join_maps: Option<Vec<HashMap<NodeID, NodeID>>>,
+    loops: Option<Vec<LoopTree>>,
+}
+
+impl PassManager {
+    pub fn new(module: Module) -> Self {
+        PassManager {
+            module,
+            passes: vec![],
+            def_uses: None,
+            reverse_postorders: None,
+            typing: None,
+            control_subgraphs: None,
+            doms: None,
+            postdoms: None,
+            fork_join_maps: None,
+            loops: None,
+        }
+    }
+
+    pub fn add_pass(&mut self, pass: Pass) {
+        self.passes.push(pass);
+    }
+
+    fn make_def_uses(&mut self) {
+        if self.def_uses.is_none() {
+            self.def_uses = Some(self.module.functions.iter().map(def_use).collect());
+        }
+    }
+
+    fn make_reverse_postorders(&mut self) {
+        if self.reverse_postorders.is_none() {
+            self.make_def_uses();
+            self.reverse_postorders = Some(
+                self.def_uses
+                    .as_ref()
+                    .unwrap()
+                    .iter()
+                    .map(reverse_postorder)
+                    .collect(),
+            );
+        }
+    }
+
+    fn make_typing(&mut self) {
+        if self.typing.is_none() {
+            self.make_reverse_postorders();
+            self.typing = Some(
+                typecheck(&mut self.module, self.reverse_postorders.as_ref().unwrap()).unwrap(),
+            );
+        }
+    }
+
+    fn make_control_subgraphs(&mut self) {
+        if self.control_subgraphs.is_none() {
+            self.make_def_uses();
+            self.control_subgraphs = Some(
+                zip(&self.module.functions, self.def_uses.as_ref().unwrap())
+                    .map(|(function, def_use)| control_subgraph(function, def_use))
+                    .collect(),
+            );
+        }
+    }
+
+    fn make_doms(&mut self) {
+        if self.doms.is_none() {
+            self.make_control_subgraphs();
+            self.doms = Some(
+                self.control_subgraphs
+                    .as_ref()
+                    .unwrap()
+                    .iter()
+                    .map(|subgraph| dominator(subgraph, NodeID::new(0)))
+                    .collect(),
+            );
+        }
+    }
+
+    fn make_postdoms(&mut self) {
+        if self.postdoms.is_none() {
+            self.make_control_subgraphs();
+            self.postdoms = Some(
+                zip(
+                    self.control_subgraphs.as_ref().unwrap().iter(),
+                    self.module.functions.iter(),
+                )
+                .map(|(subgraph, function)| dominator(subgraph, NodeID::new(function.nodes.len())))
+                .collect(),
+            );
+        }
+    }
+
+    fn make_fork_join_maps(&mut self) {
+        if self.fork_join_maps.is_none() {
+            self.make_typing();
+            self.fork_join_maps = Some(
+                zip(
+                    self.module.functions.iter(),
+                    self.typing.as_ref().unwrap().iter(),
+                )
+                .map(|(function, typing)| fork_join_map(function, typing, &self.module.types))
+                .collect(),
+            );
+        }
+    }
+
+    fn make_loops(&mut self) {
+        if self.loops.is_none() {
+            self.make_control_subgraphs();
+            self.make_doms();
+            self.make_fork_join_maps();
+            let control_subgraphs = self.control_subgraphs.as_ref().unwrap().iter();
+            let doms = self.doms.as_ref().unwrap().iter();
+            let fork_join_maps = self.fork_join_maps.as_ref().unwrap().iter();
+            self.loops = Some(
+                zip(control_subgraphs, zip(doms, fork_join_maps))
+                    .map(|(control_subgraph, (dom, fork_join_map))| {
+                        loops(control_subgraph, NodeID::new(0), dom, fork_join_map)
+                    })
+                    .collect(),
+            );
+        }
+    }
+
+    pub fn run_passes(mut self) -> Module {
+        for pass in self.passes.clone().iter() {
+            match pass {
+                Pass::DCE => {
+                    for idx in 0..self.module.functions.len() {
+                        dce(&mut self.module.functions[idx]);
+                    }
+                }
+                Pass::CCP => {
+                    self.make_def_uses();
+                    self.make_reverse_postorders();
+                    let def_uses = self.def_uses.as_ref().unwrap();
+                    let reverse_postorders = self.reverse_postorders.as_ref().unwrap();
+                    for idx in 0..self.module.functions.len() {
+                        ccp(
+                            &mut self.module.functions[idx],
+                            &self.module.types,
+                            &mut self.module.constants,
+                            &def_uses[idx],
+                            &reverse_postorders[idx],
+                        );
+                    }
+                }
+                Pass::GVN => {
+                    self.make_def_uses();
+                    let def_uses = self.def_uses.as_ref().unwrap();
+                    for idx in 0..self.module.functions.len() {
+                        gvn(
+                            &mut self.module.functions[idx],
+                            &self.module.constants,
+                            &def_uses[idx],
+                        );
+                    }
+                }
+                Pass::Forkify => {
+                    self.make_def_uses();
+                    self.make_loops();
+                    let def_uses = self.def_uses.as_ref().unwrap();
+                    let loops = self.loops.as_ref().unwrap();
+                    for idx in 0..self.module.functions.len() {
+                        forkify(
+                            &mut self.module.functions[idx],
+                            &self.module.constants,
+                            &mut self.module.dynamic_constants,
+                            &def_uses[idx],
+                            &loops[idx],
+                        )
+                    }
+                }
+                Pass::Verify => {
+                    let (
+                        def_uses,
+                        reverse_postorders,
+                        typing,
+                        subgraphs,
+                        doms,
+                        postdoms,
+                        fork_join_maps,
+                    ) = verify(&mut self.module)
+                        .expect("PANIC: Failed to verify Hercules IR module.");
+
+                    // Verification produces a bunch of analysis results that
+                    // may be useful for later passes.
+                    self.def_uses = Some(def_uses);
+                    self.reverse_postorders = Some(reverse_postorders);
+                    self.typing = Some(typing);
+                    self.control_subgraphs = Some(subgraphs);
+                    self.doms = Some(doms);
+                    self.postdoms = Some(postdoms);
+                    self.fork_join_maps = Some(fork_join_maps);
+
+                    // Verification doesn't require clearing analysis results.
+                    continue;
+                }
+            }
+
+            for idx in 0..self.module.functions.len() {
+                self.module.functions[idx].delete_gravestones();
+            }
+            self.clear_analyses();
+        }
+
+        self.module
+    }
+
+    fn clear_analyses(&mut self) {
+        self.def_uses = None;
+        self.reverse_postorders = None;
+        self.typing = None;
+        self.control_subgraphs = None;
+        self.doms = None;
+        self.postdoms = None;
+        self.fork_join_maps = None;
+        self.loops = None;
+    }
+}
diff --git a/hercules_samples/ccp_example.hir b/hercules_samples/ccp_example.hir
index 618a7573..4a255f0d 100644
--- a/hercules_samples/ccp_example.hir
+++ b/hercules_samples/ccp_example.hir
@@ -6,14 +6,14 @@ fn tricky(x: i32) -> i32
   val = phi(loop, one, later_val)
   b = ne(one, val)
   if1 = if(loop, b)
-  if1_false = read_prod(if1, 0)
-  if1_true = read_prod(if1, 1)
+  if1_false = read(if1, control(0))
+  if1_true = read(if1, control(1))
   middle = region(if1_false, if1_true)
   inter_val = sub(two, val)
   later_val = phi(middle, inter_val, two)
   idx_dec = sub(idx, one)
   cond = gte(idx_dec, one)
   if2 = if(middle, cond)
-  if2_false = read_prod(if2, 0)
-  if2_true = read_prod(if2, 1)
-  r = return(if2_false, later_val)
\ No newline at end of file
+  if2_false = read(if2, control(0))
+  if2_true = read(if2, control(1))
+  r = return(if2_false, later_val)
diff --git a/hercules_samples/sum_sample.hir b/hercules_samples/sum_sample.hir
new file mode 100644
index 00000000..55852e7f
--- /dev/null
+++ b/hercules_samples/sum_sample.hir
@@ -0,0 +1,16 @@
+fn sum(a: array(f32, 16)) -> f32
+  zero_idx = constant(u64, 0)
+  one_idx = constant(u64, 1)
+  zero_inc = constant(f32, 0)
+  bound = constant(u64, 16)
+  loop = region(start, if_true)
+  idx = phi(loop, zero_idx, idx_inc)
+  idx_inc = add(idx, one_idx)
+  red = phi(loop, zero_inc, red_add)
+  read = read(a, position(idx))
+  red_add = add(red, read)
+  in_bounds = lt(idx_inc, bound)
+  if = if(loop, in_bounds)
+  if_false = read(if, control(0))
+  if_true = read(if, control(1))
+  r = return(if_false, red_add)
diff --git a/hercules_tools/hercules_cpu/src/main.rs b/hercules_tools/hercules_cpu/src/main.rs
index 209d8619..6a5d2bd2 100644
--- a/hercules_tools/hercules_cpu/src/main.rs
+++ b/hercules_tools/hercules_cpu/src/main.rs
@@ -24,36 +24,20 @@ fn main() {
     let mut contents = String::new();
     file.read_to_string(&mut contents)
         .expect("PANIC: Unable to read input file contents.");
-    let mut module =
+    let module =
         hercules_ir::parse::parse(&contents).expect("PANIC: Failed to parse Hercules IR file.");
-    let (def_uses, reverse_postorders, _typing, _subgraphs, _doms, _postdoms, _fork_join_maps) =
-        hercules_ir::verify::verify(&mut module)
-            .expect("PANIC: Failed to verify Hercules IR module.");
 
-    let mut module = module.map(
-        |(mut function, id), (types, mut constants, dynamic_constants)| {
-            hercules_opt::ccp::ccp(
-                &mut function,
-                &types,
-                &mut constants,
-                &def_uses[id.idx()],
-                &reverse_postorders[id.idx()],
-            );
-            hercules_opt::dce::dce(&mut function);
-            function.delete_gravestones();
+    let mut pm = hercules_opt::pass::PassManager::new(module);
+    pm.add_pass(hercules_opt::pass::Pass::Verify);
+    pm.add_pass(hercules_opt::pass::Pass::CCP);
+    pm.add_pass(hercules_opt::pass::Pass::DCE);
+    pm.add_pass(hercules_opt::pass::Pass::GVN);
+    pm.add_pass(hercules_opt::pass::Pass::DCE);
+    let mut module = pm.run_passes();
 
-            let def_use = hercules_ir::def_use::def_use(&function);
-            hercules_opt::gvn::gvn(&mut function, &constants, &def_use);
-            hercules_opt::dce::dce(&mut function);
-            function.delete_gravestones();
-
-            (function, (types, constants, dynamic_constants))
-        },
-    );
     let (def_uses, reverse_postorders, typing, subgraphs, doms, _postdoms, fork_join_maps) =
         hercules_ir::verify::verify(&mut module)
             .expect("PANIC: Failed to verify Hercules IR module.");
-
     let antideps: Vec<_> = module
         .functions
         .iter()
diff --git a/hercules_tools/hercules_dot/src/main.rs b/hercules_tools/hercules_dot/src/main.rs
index 4c80b17d..39f2057a 100644
--- a/hercules_tools/hercules_dot/src/main.rs
+++ b/hercules_tools/hercules_dot/src/main.rs
@@ -32,36 +32,22 @@ fn main() {
     let mut contents = String::new();
     file.read_to_string(&mut contents)
         .expect("PANIC: Unable to read input file contents.");
-    let mut module =
+    let module =
         hercules_ir::parse::parse(&contents).expect("PANIC: Failed to parse Hercules IR file.");
-    let (def_uses, reverse_postorders, _typing, _subgraphs, _doms, _postdoms, _fork_join_maps) =
-        hercules_ir::verify::verify(&mut module)
-            .expect("PANIC: Failed to verify Hercules IR module.");
 
-    let mut module = module.map(
-        |(mut function, id), (types, mut constants, dynamic_constants)| {
-            hercules_opt::ccp::ccp(
-                &mut function,
-                &types,
-                &mut constants,
-                &def_uses[id.idx()],
-                &reverse_postorders[id.idx()],
-            );
-            hercules_opt::dce::dce(&mut function);
-            function.delete_gravestones();
+    let mut pm = hercules_opt::pass::PassManager::new(module);
+    pm.add_pass(hercules_opt::pass::Pass::Verify);
+    pm.add_pass(hercules_opt::pass::Pass::CCP);
+    pm.add_pass(hercules_opt::pass::Pass::DCE);
+    pm.add_pass(hercules_opt::pass::Pass::GVN);
+    pm.add_pass(hercules_opt::pass::Pass::DCE);
+    pm.add_pass(hercules_opt::pass::Pass::Forkify);
+    pm.add_pass(hercules_opt::pass::Pass::DCE);
+    let mut module = pm.run_passes();
 
-            let def_use = hercules_ir::def_use::def_use(&function);
-            hercules_opt::gvn::gvn(&mut function, &constants, &def_use);
-            hercules_opt::dce::dce(&mut function);
-            function.delete_gravestones();
-
-            (function, (types, constants, dynamic_constants))
-        },
-    );
     let (def_uses, reverse_postorders, typing, subgraphs, doms, _postdoms, fork_join_maps) =
         hercules_ir::verify::verify(&mut module)
             .expect("PANIC: Failed to verify Hercules IR module.");
-
     let plans: Vec<_> = module
         .functions
         .iter()
-- 
GitLab