diff --git a/Cargo.lock b/Cargo.lock
index ed83f6be73b90235394dc221fa6f32222c8d1f4a..b5a72ec6d4576aedb78b75f5cf099017e9111c1a 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -177,7 +177,6 @@ name = "hercules_dot"
 version = "0.1.0"
 dependencies = [
  "clap",
- "hercules_cg",
  "hercules_ir",
  "hercules_opt",
  "rand",
@@ -209,6 +208,7 @@ dependencies = [
  "bitvec",
  "hercules_ir",
  "ordered-float",
+ "take_mut",
 ]
 
 [[package]]
@@ -347,6 +347,12 @@ dependencies = [
  "unicode-ident",
 ]
 
+[[package]]
+name = "take_mut"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
+
 [[package]]
 name = "tap"
 version = "1.0.1"
diff --git a/hercules_cg/src/lib.rs b/hercules_cg/src/lib.rs
index dbe405317d8bf07a17448ff9fc257c1df6f3c279..2d1293debc1f38fcb262e2dcfd69f38868df0269 100644
--- a/hercules_cg/src/lib.rs
+++ b/hercules_cg/src/lib.rs
@@ -1,7 +1,3 @@
-pub mod antideps;
 pub mod cpu_beta;
-pub mod gcm;
 
-pub use crate::antideps::*;
 pub use crate::cpu_beta::*;
-pub use crate::gcm::*;
diff --git a/hercules_cg/src/antideps.rs b/hercules_ir/src/antideps.rs
similarity index 76%
rename from hercules_cg/src/antideps.rs
rename to hercules_ir/src/antideps.rs
index 2ce6db83bef9687eda8e1b01cd16824e92562545..9dc3d1ee8b00360044ecf0ce99ad9f022df59693 100644
--- a/hercules_cg/src/antideps.rs
+++ b/hercules_ir/src/antideps.rs
@@ -1,14 +1,41 @@
-extern crate hercules_ir;
+use crate::*;
 
-use self::hercules_ir::def_use::*;
-use self::hercules_ir::ir::*;
+/*
+ * Top level function to get all anti-dependencies.
+ */
+pub fn antideps(function: &Function, def_use: &ImmutableDefUseMap) -> Vec<(NodeID, NodeID)> {
+    generic_antideps(
+        function,
+        def_use,
+        (0..function.nodes.len()).map(NodeID::new),
+    )
+}
+
+/*
+ * Sometimes, we are only interested in anti-dependence edges involving arrays.
+ */
+pub fn array_antideps(
+    function: &Function,
+    def_use: &ImmutableDefUseMap,
+    types: &Vec<Type>,
+    typing: &Vec<TypeID>,
+) -> Vec<(NodeID, NodeID)> {
+    generic_antideps(
+        function,
+        def_use,
+        (0..function.nodes.len())
+            .map(NodeID::new)
+            .filter(|id| types[typing[id.idx()].idx()].is_array()),
+    )
+}
 
 /*
- * Top level function to assemble anti-dependence edges. Returns a list of pairs
- * of nodes. The first item in the pair is the read node, and the second item is
- * the write node.
+ * Function to assemble anti-dependence edges. Returns a list of pairs of nodes.
+ * The first item in the pair is the read node, and the second item is the write
+ * node. Takes an iterator of nodes in case we want only a subset of all anti-
+ * dependencies.
  */
-pub fn antideps<I: Iterator<Item = NodeID>>(
+fn generic_antideps<I: Iterator<Item = NodeID>>(
     function: &Function,
     def_use: &ImmutableDefUseMap,
     nodes: I,
@@ -58,21 +85,3 @@ pub fn antideps<I: Iterator<Item = NodeID>>(
 
     antideps
 }
-
-/*
- * Sometimes, we are only interested in anti-dependence edges involving arrays.
- */
-pub fn array_antideps(
-    function: &Function,
-    def_use: &ImmutableDefUseMap,
-    types: &Vec<Type>,
-    typing: &Vec<TypeID>,
-) -> Vec<(NodeID, NodeID)> {
-    antideps(
-        function,
-        def_use,
-        (0..function.nodes.len())
-            .map(NodeID::new)
-            .filter(|id| types[typing[id.idx()].idx()].is_array()),
-    )
-}
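
For context on what this relocated analysis computes: an anti-dependence is a write-after-read constraint, so each returned pair orders a write node after a node that still reads the overwritten value. A minimal standalone sketch of that pairing (the NodeID type and the reader/writer lists here are stand-ins, not the hercules_ir definitions):

    // Standalone sketch of the (read, write) pairing described above; NodeID
    // here is a stand-in type, not the hercules_ir definition.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct NodeID(u32);

    // Every node that overwrites a value must be ordered after every node that
    // still reads the old value, so emit one (read, write) edge per pair.
    fn antidep_edges(readers: &[NodeID], writers: &[NodeID]) -> Vec<(NodeID, NodeID)> {
        let mut edges = vec![];
        for &read in readers {
            for &write in writers {
                if read != write {
                    edges.push((read, write));
                }
            }
        }
        edges
    }

    fn main() {
        // A value read by nodes 1 and 2 and overwritten by node 3.
        let edges = antidep_edges(&[NodeID(1), NodeID(2)], &[NodeID(3)]);
        assert_eq!(edges, vec![(NodeID(1), NodeID(3)), (NodeID(2), NodeID(3))]);
    }
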
diff --git a/hercules_ir/src/dom.rs b/hercules_ir/src/dom.rs
index e9cb07cd428fa006e3588076b9b94633c2c1e24a..67fa231e3fb2bd28b8a8961e43cc324eb0583315 100644
--- a/hercules_ir/src/dom.rs
+++ b/hercules_ir/src/dom.rs
@@ -18,6 +18,7 @@ pub struct DomChainIterator<'a> {
     dom: &'a DomTree,
     iter: Option<NodeID>,
     top: NodeID,
+    bottom: NodeID,
 }
 
 impl DomTree {
@@ -113,6 +114,7 @@ impl DomTree {
             dom: self,
             iter: Some(bottom),
             top,
+            bottom,
         }
     }
 
@@ -121,6 +123,7 @@ impl DomTree {
             dom: self,
             iter: Some(bottom),
             top: self.root,
+            bottom,
         }
     }
 
@@ -140,7 +143,10 @@ impl<'a> Iterator for DomChainIterator<'a> {
             } else if let Some(iter) = self.dom.imm_dom(iter) {
                 self.iter = Some(iter);
             } else {
-                panic!("In DomChainIterator, top node doesn't dominate bottom node.")
+                panic!(
+                    "In DomChainIterator, top node ({:?}) doesn't dominate bottom node ({:?}).",
+                    self.top, self.bottom
+                )
             }
             Some(ret)
         } else {
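
The extra `bottom` field exists only so the panic can name both endpoints; the underlying failure is a chain walk that runs off the root because `top` never dominates `bottom`. A small self-contained illustration of that walk, using a plain immediate-dominator map rather than the real DomTree API:

    use std::collections::HashMap;

    // Minimal stand-in for a dominator-chain walk: each entry maps a node to
    // its immediate dominator. This only shows the failure the new panic
    // message reports: reaching the root without ever meeting `top`.
    fn chain(idom: &HashMap<u32, u32>, bottom: u32, top: u32) -> Option<Vec<u32>> {
        let mut cur = bottom;
        let mut out = vec![cur];
        while cur != top {
            match idom.get(&cur) {
                Some(&parent) => {
                    cur = parent;
                    out.push(cur);
                }
                // Ran off the root: `top` does not dominate `bottom`.
                None => return None,
            }
        }
        Some(out)
    }

    fn main() {
        // Control flow 0 -> 1 -> 2 with a side branch 0 -> 3.
        let idom: HashMap<u32, u32> = [(1, 0), (2, 1), (3, 0)].into_iter().collect();
        assert_eq!(chain(&idom, 2, 0), Some(vec![2, 1, 0]));
        // 3 does not dominate 2, the case gcm now guards against with does_dom.
        assert_eq!(chain(&idom, 2, 3), None);
    }
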
diff --git a/hercules_cg/src/gcm.rs b/hercules_ir/src/gcm.rs
similarity index 72%
rename from hercules_cg/src/gcm.rs
rename to hercules_ir/src/gcm.rs
index 1f840a5cfeb04722273d18365a1eea45a436d676..60e7935852fea297d6cce4b86d42edbbf635228a 100644
--- a/hercules_cg/src/gcm.rs
+++ b/hercules_ir/src/gcm.rs
@@ -1,13 +1,6 @@
-extern crate hercules_ir;
-
 use std::collections::HashMap;
 
-use self::hercules_ir::dataflow::*;
-use self::hercules_ir::def_use::*;
-use self::hercules_ir::dom::*;
-use self::hercules_ir::ir::*;
-use self::hercules_ir::loops::*;
-use self::hercules_ir::subgraph::*;
+use crate::*;
 
 /*
  * Top level global code motion function. Assigns each data node to one of its
@@ -18,10 +11,9 @@ pub fn gcm(
     function: &Function,
     def_use: &ImmutableDefUseMap,
     reverse_postorder: &Vec<NodeID>,
-    control_subgraph: &Subgraph,
     dom: &DomTree,
-    fork_join_map: &HashMap<NodeID, NodeID>,
     antideps: &Vec<(NodeID, NodeID)>,
+    loops: &LoopTree,
 ) -> Vec<NodeID> {
     // Step 1: find the immediate control uses and immediate control users of
     // each node.
@@ -51,10 +43,7 @@ pub fn gcm(
         immediate_control_users[write.idx()] = meet;
     }
 
-    // Step 2: calculate loop tree of function.
-    let loops = loops(&control_subgraph, NodeID::new(0), &dom, fork_join_map);
-
-    // Step 3: find most control dependent, shallowest loop level node for every
+    // Step 2: find most control dependent, shallowest loop level node for every
     // node.
     let bbs = (0..function.nodes.len())
         .map(|idx| {
@@ -64,25 +53,31 @@ pub fn gcm(
                 .common_ancestor(immediate_control_users[idx].nodes(function.nodes.len() as u32))
                 .unwrap_or(highest);
 
-            // Collect into vector to reverse, since we want to traverse down
-            // the dom tree, not up it.
-            let mut chain = dom
-                .chain(lowest, highest)
-                .collect::<Vec<_>>()
-                .into_iter()
-                .rev();
+            // If the ancestor of the control users isn't below the lowest
+            // control use, then just place the node in the lowest control use.
+            if !dom.does_dom(highest, lowest) {
+                highest
+            } else {
+                // Collect into a vector to reverse, since we want to traverse down
+                // the dom tree, not up it.
+                let mut chain = dom
+                    .chain(lowest, highest)
+                    .collect::<Vec<_>>()
+                    .into_iter()
+                    .rev();
 
-            let mut location = chain.next().unwrap();
-            while let Some(control_node) = chain.next() {
-                // Traverse down the dom tree until we find a loop.
-                if loops.contains(control_node) {
-                    break;
-                } else {
-                    location = control_node;
+                let mut location = chain.next().unwrap();
+                while let Some(control_node) = chain.next() {
+                    // Traverse down the dom tree until we find a loop.
+                    if loops.contains(control_node) {
+                        break;
+                    } else {
+                        location = control_node;
+                    }
                 }
-            }
 
-            location
+                location
+            }
         })
         .collect();
 
diff --git a/hercules_ir/src/ir.rs b/hercules_ir/src/ir.rs
index 7efc7279497e99890c6adf0c16ca1eb2d7789170..466451791dde6ae771db61da484d010c7ed771db 100644
--- a/hercules_ir/src/ir.rs
+++ b/hercules_ir/src/ir.rs
@@ -584,11 +584,11 @@ impl Function {
  * Some analysis results can be updated after gravestone deletions.
  */
 pub trait GraveUpdatable {
-    fn map_gravestones(self, function: &Function, grave_mapping: &Vec<NodeID>) -> Self;
+    fn map_gravestones(self, grave_mapping: &Vec<NodeID>) -> Self;
 }
 
 impl<T: Clone> GraveUpdatable for Vec<T> {
-    fn map_gravestones(self, _function: &Function, grave_mapping: &Vec<NodeID>) -> Self {
+    fn map_gravestones(self, grave_mapping: &Vec<NodeID>) -> Self {
         let mut new_self = vec![];
         for (data, (idx, mapping)) in
             std::iter::zip(self.into_iter(), grave_mapping.iter().enumerate())
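
The Vec impl's body is cut off in this hunk, so the following is only an illustration of the convention the new signature implies (and that Plan::repair relies on below): grave_mapping[old] holds the node's new ID, a mapping of 0 for any node other than node 0 marks a deleted node, and surviving entries keep their relative order.

    // Illustrative compaction of a per-node analysis vector with a grave
    // mapping; this is an assumption-based sketch, not the exact trait impl.
    fn compact_analysis<T>(data: Vec<T>, grave_mapping: &[usize]) -> Vec<T> {
        data.into_iter()
            .zip(grave_mapping.iter().enumerate())
            .filter_map(|(value, (old_idx, &new_idx))| {
                // Keep entries for surviving nodes; drop entries for gravestones.
                if old_idx == 0 || new_idx != 0 {
                    Some(value)
                } else {
                    None
                }
            })
            .collect()
    }

    fn main() {
        // Nodes 1 and 3 survive (remapped to 1 and 2); node 2 was deleted.
        let grave_mapping = [0, 1, 0, 2];
        let labels = vec!["root", "a", "dead", "b"];
        assert_eq!(compact_analysis(labels, &grave_mapping), vec!["root", "a", "b"]);
    }
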
diff --git a/hercules_ir/src/lib.rs b/hercules_ir/src/lib.rs
index f402a788802407b051705b99e19a93ef5c778199..9da0276d98fac8e0bb5872a8724c7f601b50f782 100644
--- a/hercules_ir/src/lib.rs
+++ b/hercules_ir/src/lib.rs
@@ -1,10 +1,12 @@
 #![feature(coroutines, coroutine_trait, let_chains)]
 
+pub mod antideps;
 pub mod build;
 pub mod dataflow;
 pub mod def_use;
 pub mod dom;
 pub mod dot;
+pub mod gcm;
 pub mod ir;
 pub mod loops;
 pub mod parse;
@@ -13,11 +15,13 @@ pub mod subgraph;
 pub mod typecheck;
 pub mod verify;
 
+pub use crate::antideps::*;
 pub use crate::build::*;
 pub use crate::dataflow::*;
 pub use crate::def_use::*;
 pub use crate::dom::*;
 pub use crate::dot::*;
+pub use crate::gcm::*;
 pub use crate::ir::*;
 pub use crate::loops::*;
 pub use crate::parse::*;
diff --git a/hercules_ir/src/schedule.rs b/hercules_ir/src/schedule.rs
index 4221bd8d7fb194e7ecc6142dc623bd6a1416b68d..9881c3be351566ea7b1d22598fa5cbb5f34baddd 100644
--- a/hercules_ir/src/schedule.rs
+++ b/hercules_ir/src/schedule.rs
@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::collections::VecDeque;
 use std::iter::zip;
 
 use crate::*;
@@ -53,31 +54,122 @@ impl Plan {
 
         map
     }
-}
 
-impl GraveUpdatable for Plan {
     /*
      * Plans must be "repairable", in the sense that the IR that's referred to
      * may change after many passes. Since a plan is an explicit side data
      * structure, it must be updated after every change in the IR.
      */
-    fn map_gravestones(self, function: &Function, grave_mapping: &Vec<NodeID>) -> Self {
+    pub fn repair(self, function: &Function, grave_mapping: &Vec<NodeID>) -> Self {
+        // Unpack the plan.
+        let old_inverse_partition_map = self.invert_partition_map();
         let Plan {
             mut schedules,
-            partitions,
+            partitions: _,
             partition_devices,
-            num_partitions,
+            num_partitions: _,
         } = self;
 
         // Schedules of old nodes just get dropped. Since schedules don't hold
         // necessary semantic information, we are free to drop them arbitrarily.
-        schedules = schedules.map_gravestones(function, grave_mapping);
+        schedules = schedules.map_gravestones(grave_mapping);
         schedules.resize(function.nodes.len(), vec![]);
 
-        // Once we've repaired the plan, now we are free to try and infer new
-        // schedules about the nodes added by previous passes.
+        // Delete now-empty partitions. First, filter out deleted nodes from the
+        // partitions and simultaneously map old node IDs to new node IDs. Then,
+        // filter out empty partitions.
+        let (new_inverse_partition_map, new_devices): (Vec<Vec<NodeID>>, Vec<Device>) =
+            zip(old_inverse_partition_map, partition_devices)
+                .into_iter()
+                .map(|(contents, device)| {
+                    (
+                        contents
+                            .into_iter()
+                            .filter_map(|id| {
+                                if id.idx() == 0 || grave_mapping[id.idx()].idx() != 0 {
+                                    Some(grave_mapping[id.idx()])
+                                } else {
+                                    None
+                                }
+                            })
+                            .collect::<Vec<NodeID>>(),
+                        device,
+                    )
+                })
+                .filter(|(contents, _)| !contents.is_empty())
+                .unzip();
+
+        // Calculate the number of nodes after deletion but before addition. Use
+        // this to iterate over the newly added nodes later.
+        let num_nodes_before_addition = new_inverse_partition_map.iter().flatten().count();
+        assert!(new_inverse_partition_map
+            .iter()
+            .flatten()
+            .all(|id| id.idx() < num_nodes_before_addition));
+
+        // Calculate the nodes that need to be assigned to a partition. This
+        // starts as just the nodes that have been added by passes.
+        let mut new_node_ids: VecDeque<NodeID> = (num_nodes_before_addition..function.nodes.len())
+            .map(NodeID::new)
+            .collect();
+
+        // Any partition no longer containing at least one control node needs to
+        // be liquidated.
+        let (new_inverse_partition_map, new_devices): (Vec<Vec<NodeID>>, Vec<Device>) =
+            zip(new_inverse_partition_map, new_devices)
+                .into_iter()
+                .filter_map(|(part, device)| {
+                    if part.iter().any(|id| function.nodes[id.idx()].is_control()) {
+                        Some((part, device))
+                    } else {
+                        // Nodes in removed partitions need to be re-partitioned.
+                        new_node_ids.extend(part);
+                        None
+                    }
+                })
+                .unzip();
+
+        // Assign the node IDs that need to be partitioned to partitions. In the
+        // process, construct a map from node ID to partition ID.
+        let mut node_id_to_partition_id: HashMap<NodeID, PartitionID> = new_inverse_partition_map
+            .into_iter()
+            .enumerate()
+            .map(|(partition_idx, node_ids)| {
+                node_ids
+                    .into_iter()
+                    .map(|node_id| (node_id, PartitionID::new(partition_idx)))
+                    .collect::<Vec<(NodeID, PartitionID)>>()
+            })
+            .flatten()
+            .collect();
+
+        // Make a best effort to assign nodes to the partition of one of their
+        // uses. Prioritize earlier uses. TODO: since not all partitions are
+        // legal, this is almost certainly not complete. Think more about that.
+        'workloop: while let Some(id) = new_node_ids.pop_front() {
+            for u in get_uses(&function.nodes[id.idx()]).as_ref() {
+                if let Some(partition_id) = node_id_to_partition_id.get(u) {
+                    node_id_to_partition_id.insert(id, *partition_id);
+                    continue 'workloop;
+                }
+            }
+            new_node_ids.push_back(id);
+        }
+
+        // Reconstruct the partitions vector.
+        let num_partitions = new_devices.len();
+        let mut partitions = vec![PartitionID::new(0); function.nodes.len()];
+        for (k, v) in node_id_to_partition_id {
+            partitions[k.idx()] = v;
+        }
 
-        todo!()
+        // Reconstruct the whole plan.
+        Plan {
+            schedules,
+            partitions,
+            partition_devices: new_devices,
+            num_partitions,
+        }
     }
 }
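
The trickiest part of repair is re-homing nodes that were added by passes or orphaned by liquidated partitions. The worklist above adopts the partition of the first already-assigned use; below is a condensed, self-contained version of just that loop, with a hypothetical per-node use map in place of get_uses on the real IR. As the TODO above notes, this is a best-effort heuristic.

    use std::collections::{HashMap, VecDeque};

    // Condensed form of the partition-assignment worklist in Plan::repair.
    fn assign_partitions(
        uses: &HashMap<u32, Vec<u32>>,
        mut assigned: HashMap<u32, u32>,
        mut worklist: VecDeque<u32>,
    ) -> HashMap<u32, u32> {
        'workloop: while let Some(id) = worklist.pop_front() {
            for u in uses.get(&id).into_iter().flatten() {
                // Adopt the partition of the first already-assigned use.
                if let Some(part) = assigned.get(u).copied() {
                    assigned.insert(id, part);
                    continue 'workloop;
                }
            }
            // No assigned use yet: retry once other nodes have been placed.
            worklist.push_back(id);
        }
        assigned
    }

    fn main() {
        // New node 10 uses new node 11, which uses old node 2 in partition 1;
        // both new nodes end up in partition 1 after one retry of node 10.
        let uses: HashMap<u32, Vec<u32>> = [(10, vec![11]), (11, vec![2])].into_iter().collect();
        let assigned: HashMap<u32, u32> = [(2, 1)].into_iter().collect();
        let worklist: VecDeque<u32> = [10, 11].into_iter().collect();
        let result = assign_partitions(&uses, assigned, worklist);
        assert_eq!(result.get(&10), Some(&1));
        assert_eq!(result.get(&11), Some(&1));
    }
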
 
diff --git a/hercules_opt/Cargo.toml b/hercules_opt/Cargo.toml
index b1f2b468c5f0a81646187ce5ff4ccd8ef2454437..7743a73ff61bad93e473f5a169cf97c95edd71e7 100644
--- a/hercules_opt/Cargo.toml
+++ b/hercules_opt/Cargo.toml
@@ -6,4 +6,5 @@ authors = ["Russel Arbore <rarbore2@illinois.edu>"]
 [dependencies]
 ordered-float = "*"
 bitvec = "*"
+take_mut = "*"
 hercules_ir = { path = "../hercules_ir" }
diff --git a/hercules_opt/src/lib.rs b/hercules_opt/src/lib.rs
index 53ebc1da7f99639e93ab57f192a3d5dc389aadd6..a82e7247e9b979ef2b6790cfa06779332b0d9872 100644
--- a/hercules_opt/src/lib.rs
+++ b/hercules_opt/src/lib.rs
@@ -1,3 +1,5 @@
+#![feature(let_chains)]
+
 pub mod ccp;
 pub mod dce;
 pub mod forkify;
diff --git a/hercules_opt/src/pass.rs b/hercules_opt/src/pass.rs
index d94de66e8dc022a48f5b69374205063c00a339d2..31a681bfbd660b353630dda7201e0f0f3b53f856 100644
--- a/hercules_opt/src/pass.rs
+++ b/hercules_opt/src/pass.rs
@@ -1,14 +1,18 @@
 extern crate hercules_ir;
+extern crate take_mut;
 
 use std::collections::HashMap;
 use std::iter::zip;
 
+use self::hercules_ir::antideps::*;
 use self::hercules_ir::dataflow::*;
 use self::hercules_ir::def_use::*;
 use self::hercules_ir::dom::*;
 use self::hercules_ir::dot::*;
+use self::hercules_ir::gcm::*;
 use self::hercules_ir::ir::*;
 use self::hercules_ir::loops::*;
+use self::hercules_ir::schedule::*;
 use self::hercules_ir::subgraph::*;
 use self::hercules_ir::typecheck::*;
 use self::hercules_ir::verify::*;
@@ -26,7 +30,7 @@ pub enum Pass {
     Forkify,
     Predication,
     Verify,
-    Xdot,
+    Xdot(bool),
 }
 
 /*
@@ -41,14 +45,20 @@ pub struct PassManager {
     passes: Vec<Pass>,
 
     // Cached analysis results.
-    def_uses: Option<Vec<ImmutableDefUseMap>>,
-    reverse_postorders: Option<Vec<Vec<NodeID>>>,
-    typing: Option<ModuleTyping>,
-    control_subgraphs: Option<Vec<Subgraph>>,
-    doms: Option<Vec<DomTree>>,
-    postdoms: Option<Vec<DomTree>>,
-    fork_join_maps: Option<Vec<HashMap<NodeID, NodeID>>>,
-    loops: Option<Vec<LoopTree>>,
+    pub def_uses: Option<Vec<ImmutableDefUseMap>>,
+    pub reverse_postorders: Option<Vec<Vec<NodeID>>>,
+    pub typing: Option<ModuleTyping>,
+    pub control_subgraphs: Option<Vec<Subgraph>>,
+    pub doms: Option<Vec<DomTree>>,
+    pub postdoms: Option<Vec<DomTree>>,
+    pub fork_join_maps: Option<Vec<HashMap<NodeID, NodeID>>>,
+    pub fork_join_nests: Option<Vec<HashMap<NodeID, Vec<NodeID>>>>,
+    pub loops: Option<Vec<LoopTree>>,
+    pub antideps: Option<Vec<Vec<(NodeID, NodeID)>>>,
+    pub bbs: Option<Vec<Vec<NodeID>>>,
+
+    // Current plans. These are repaired after each pass rather than recomputed.
+    pub plans: Option<Vec<Plan>>,
 }
 
 impl PassManager {
@@ -63,7 +73,11 @@ impl PassManager {
             doms: None,
             postdoms: None,
             fork_join_maps: None,
+            fork_join_nests: None,
             loops: None,
+            antideps: None,
+            bbs: None,
+            plans: None,
         }
     }
 
@@ -71,13 +85,13 @@ impl PassManager {
         self.passes.push(pass);
     }
 
-    fn make_def_uses(&mut self) {
+    pub fn make_def_uses(&mut self) {
         if self.def_uses.is_none() {
             self.def_uses = Some(self.module.functions.iter().map(def_use).collect());
         }
     }
 
-    fn make_reverse_postorders(&mut self) {
+    pub fn make_reverse_postorders(&mut self) {
         if self.reverse_postorders.is_none() {
             self.make_def_uses();
             self.reverse_postorders = Some(
@@ -91,7 +105,7 @@ impl PassManager {
         }
     }
 
-    fn make_typing(&mut self) {
+    pub fn make_typing(&mut self) {
         if self.typing.is_none() {
             self.make_reverse_postorders();
             self.typing = Some(
@@ -100,7 +114,7 @@ impl PassManager {
         }
     }
 
-    fn make_control_subgraphs(&mut self) {
+    pub fn make_control_subgraphs(&mut self) {
         if self.control_subgraphs.is_none() {
             self.make_def_uses();
             self.control_subgraphs = Some(
@@ -111,7 +125,7 @@ impl PassManager {
         }
     }
 
-    fn make_doms(&mut self) {
+    pub fn make_doms(&mut self) {
         if self.doms.is_none() {
             self.make_control_subgraphs();
             self.doms = Some(
@@ -125,7 +139,7 @@ impl PassManager {
         }
     }
 
-    fn make_postdoms(&mut self) {
+    pub fn make_postdoms(&mut self) {
         if self.postdoms.is_none() {
             self.make_control_subgraphs();
             self.postdoms = Some(
@@ -139,7 +153,7 @@ impl PassManager {
         }
     }
 
-    fn make_fork_join_maps(&mut self) {
+    pub fn make_fork_join_maps(&mut self) {
         if self.fork_join_maps.is_none() {
             self.make_typing();
             self.fork_join_maps = Some(
@@ -153,7 +167,27 @@ impl PassManager {
         }
     }
 
-    fn make_loops(&mut self) {
+    pub fn make_fork_join_nests(&mut self) {
+        if self.fork_join_nests.is_none() {
+            self.make_doms();
+            self.make_fork_join_maps();
+            self.fork_join_nests = Some(
+                zip(
+                    self.module.functions.iter(),
+                    zip(
+                        self.doms.as_ref().unwrap().iter(),
+                        self.fork_join_maps.as_ref().unwrap().iter(),
+                    ),
+                )
+                .map(|(function, (dom, fork_join_map))| {
+                    compute_fork_join_nesting(function, dom, fork_join_map)
+                })
+                .collect(),
+            );
+        }
+    }
+
+    pub fn make_loops(&mut self) {
         if self.loops.is_none() {
             self.make_control_subgraphs();
             self.make_doms();
@@ -171,7 +205,72 @@ impl PassManager {
         }
     }
 
-    pub fn run_passes(mut self) -> Module {
+    pub fn make_antideps(&mut self) {
+        if self.antideps.is_none() {
+            self.make_def_uses();
+            self.antideps = Some(
+                zip(
+                    self.def_uses.as_ref().unwrap().iter(),
+                    self.module.functions.iter(),
+                )
+                .map(|(def_use, function)| antideps(function, def_use))
+                .collect(),
+            );
+        }
+    }
+
+    pub fn make_bbs(&mut self) {
+        if self.bbs.is_none() {
+            self.make_def_uses();
+            self.make_reverse_postorders();
+            self.make_doms();
+            self.make_antideps();
+            self.make_loops();
+            let def_uses = self.def_uses.as_ref().unwrap().iter();
+            let reverse_postorders = self.reverse_postorders.as_ref().unwrap().iter();
+            let doms = self.doms.as_ref().unwrap().iter();
+            let antideps = self.antideps.as_ref().unwrap().iter();
+            let loops = self.loops.as_ref().unwrap().iter();
+            self.bbs = Some(
+                zip(
+                    self.module.functions.iter(),
+                    zip(
+                        def_uses,
+                        zip(reverse_postorders, zip(doms, zip(antideps, loops))),
+                    ),
+                )
+                .map(
+                    |(function, (def_use, (reverse_postorder, (dom, (antideps, loops)))))| {
+                        gcm(function, def_use, reverse_postorder, dom, antideps, loops)
+                    },
+                )
+                .collect(),
+            );
+        }
+    }
+
+    pub fn make_plans(&mut self) {
+        if self.plans.is_none() {
+            self.make_reverse_postorders();
+            self.make_fork_join_maps();
+            self.make_bbs();
+            let reverse_postorders = self.reverse_postorders.as_ref().unwrap().iter();
+            let fork_join_maps = self.fork_join_maps.as_ref().unwrap().iter();
+            let bbs = self.bbs.as_ref().unwrap().iter();
+            self.plans = Some(
+                zip(
+                    self.module.functions.iter(),
+                    zip(reverse_postorders, zip(fork_join_maps, bbs)),
+                )
+                .map(|(function, (reverse_postorder, (fork_join_map, bb)))| {
+                    default_plan(function, reverse_postorder, fork_join_map, bb)
+                })
+                .collect(),
+            );
+        }
+    }
+
+    pub fn run_passes(&mut self) {
         for pass in self.passes.clone().iter() {
             match pass {
                 Pass::DCE => {
@@ -225,10 +324,12 @@ impl PassManager {
                     self.make_reverse_postorders();
                     self.make_doms();
                     self.make_fork_join_maps();
+                    self.make_plans();
                     let def_uses = self.def_uses.as_ref().unwrap();
                     let reverse_postorders = self.reverse_postorders.as_ref().unwrap();
                     let doms = self.doms.as_ref().unwrap();
                     let fork_join_maps = self.fork_join_maps.as_ref().unwrap();
+                    let plans = self.plans.as_ref().unwrap();
                     for idx in 0..self.module.functions.len() {
                         predication(
                             &mut self.module.functions[idx],
@@ -236,7 +337,7 @@ impl PassManager {
                             &reverse_postorders[idx],
                             &doms[idx],
                             &fork_join_maps[idx],
-                            &vec![],
+                            &plans[idx].schedules,
                         )
                     }
                 }
@@ -262,28 +363,43 @@ impl PassManager {
                     self.postdoms = Some(postdoms);
                     self.fork_join_maps = Some(fork_join_maps);
 
-                    // Verification doesn't require clearing analysis results.
+                    // Verify doesn't require clearing analysis results.
                     continue;
                 }
-                Pass::Xdot => {
+                Pass::Xdot(force_analyses) => {
                     self.make_reverse_postorders();
+                    if *force_analyses {
+                        self.make_doms();
+                        self.make_fork_join_maps();
+                        self.make_plans();
+                    }
                     xdot_module(
                         &self.module,
                         self.reverse_postorders.as_ref().unwrap(),
                         self.doms.as_ref(),
                         self.fork_join_maps.as_ref(),
-                        None,
+                        self.plans.as_ref(),
                     );
+
+                    // Xdot doesn't require clearing analysis results.
+                    continue;
                 }
             }
 
+            // Clean up the module after each pass: delete gravestone nodes,
+            // repair the plans, and clear out-of-date analyses.
             for idx in 0..self.module.functions.len() {
-                self.module.functions[idx].delete_gravestones();
+                let grave_mapping = self.module.functions[idx].delete_gravestones();
+                let plans = &mut self.plans;
+                let functions = &self.module.functions;
+                if let Some(plans) = plans.as_mut() {
+                    take_mut::take(&mut plans[idx], |plan| {
+                        plan.repair(&functions[idx], &grave_mapping)
+                    });
+                }
             }
             self.clear_analyses();
         }
-
-        self.module
     }
 
     fn clear_analyses(&mut self) {
@@ -295,5 +411,13 @@ impl PassManager {
         self.postdoms = None;
         self.fork_join_maps = None;
         self.loops = None;
+        self.antideps = None;
+        self.bbs = None;
+
+        // Don't clear the plans - they are repaired, not reconstructed.
+    }
+
+    pub fn get_module(self) -> Module {
+        self.module
     }
 }
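
Plan::repair consumes a Plan by value, but the pass manager only holds &mut slots inside a Vec; that mismatch is exactly what the new take_mut dependency resolves in run_passes. A small sketch of the pattern with a stand-in Plan type (not the real hercules_ir one):

    extern crate take_mut;

    // take_mut::take temporarily moves the value out from behind the &mut,
    // applies the by-value function, and moves the result back in.
    #[derive(Debug, PartialEq)]
    struct Plan {
        num_partitions: usize,
    }

    impl Plan {
        // Stand-in for the real repair: here it just bumps a counter.
        fn repair(self) -> Plan {
            Plan {
                num_partitions: self.num_partitions + 1,
            }
        }
    }

    fn main() {
        let mut plans = vec![Plan { num_partitions: 1 }, Plan { num_partitions: 3 }];
        for idx in 0..plans.len() {
            // If the closure panics, take_mut aborts the process rather than
            // leave the slot logically uninitialized.
            take_mut::take(&mut plans[idx], |plan| plan.repair());
        }
        assert_eq!(plans[1], Plan { num_partitions: 4 });
    }
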
diff --git a/hercules_tools/hercules_cpu/src/main.rs b/hercules_tools/hercules_cpu/src/main.rs
index 32c4e0642be46e86a4ab957bce22baf7fde2d4d8..c1b66edece5e3fd75e14b4b25c6f02d5c3647463 100644
--- a/hercules_tools/hercules_cpu/src/main.rs
+++ b/hercules_tools/hercules_cpu/src/main.rs
@@ -35,50 +35,27 @@ fn main() {
     pm.add_pass(hercules_opt::pass::Pass::DCE);
     pm.add_pass(hercules_opt::pass::Pass::Forkify);
     pm.add_pass(hercules_opt::pass::Pass::DCE);
-    let mut module = pm.run_passes();
+    pm.add_pass(hercules_opt::pass::Pass::Predication);
+    pm.add_pass(hercules_opt::pass::Pass::DCE);
 
-    let (def_uses, reverse_postorders, typing, subgraphs, doms, _postdoms, fork_join_maps) =
-        hercules_ir::verify::verify(&mut module)
-            .expect("PANIC: Failed to verify Hercules IR module.");
-    let antideps: Vec<_> = module
-        .functions
-        .iter()
-        .enumerate()
-        .map(|(idx, function)| {
-            hercules_cg::antideps::array_antideps(
-                function,
-                &def_uses[idx],
-                &module.types,
-                &typing[idx],
-            )
-        })
-        .collect();
+    pm.run_passes();
+    pm.make_typing();
+    pm.make_reverse_postorders();
+    pm.make_def_uses();
+    pm.make_bbs();
+    pm.make_antideps();
+    pm.make_fork_join_maps();
+    pm.make_fork_join_nests();
 
-    let bbs: Vec<_> = module
-        .functions
-        .iter()
-        .enumerate()
-        .map(|(idx, function)| {
-            hercules_cg::gcm::gcm(
-                function,
-                &def_uses[idx],
-                &reverse_postorders[idx],
-                &subgraphs[idx],
-                &doms[idx],
-                &fork_join_maps[idx],
-                &antideps[idx],
-            )
-        })
-        .collect();
+    let typing = pm.typing.as_ref().unwrap().clone();
+    let reverse_postorders = pm.reverse_postorders.as_ref().unwrap().clone();
+    let def_uses = pm.def_uses.as_ref().unwrap().clone();
+    let bbs = pm.bbs.as_ref().unwrap().clone();
+    let antideps = pm.antideps.as_ref().unwrap().clone();
+    let fork_join_maps = pm.fork_join_maps.as_ref().unwrap().clone();
+    let fork_join_nests = pm.fork_join_nests.as_ref().unwrap().clone();
 
-    let fork_join_nests: Vec<_> = module
-        .functions
-        .iter()
-        .enumerate()
-        .map(|(idx, function)| {
-            hercules_cg::gcm::compute_fork_join_nesting(function, &doms[idx], &fork_join_maps[idx])
-        })
-        .collect();
+    let module = pm.get_module();
 
     let mut file = File::create("test.ll").unwrap();
     let mut contents = String::new();
diff --git a/hercules_tools/hercules_dot/Cargo.toml b/hercules_tools/hercules_dot/Cargo.toml
index 078baea93996fe410ba922b725e684d78362cac8..f2b42c0ea2bf18fdca6dc00b33af17302c19f145 100644
--- a/hercules_tools/hercules_dot/Cargo.toml
+++ b/hercules_tools/hercules_dot/Cargo.toml
@@ -7,5 +7,4 @@ authors = ["Russel Arbore <rarbore2@illinois.edu>"]
 clap = { version = "*", features = ["derive"] }
 hercules_ir = { path = "../../hercules_ir" }
 hercules_opt = { path = "../../hercules_opt" }
-hercules_cg = { path = "../../hercules_cg" }
 rand = "*"
diff --git a/hercules_tools/hercules_dot/src/main.rs b/hercules_tools/hercules_dot/src/main.rs
index 425b9478399cea4a753abb5726b527bb03c271f6..013ba87b7b11233b1f67fe9c0b63b91dfd5bfc44 100644
--- a/hercules_tools/hercules_dot/src/main.rs
+++ b/hercules_tools/hercules_dot/src/main.rs
@@ -38,49 +38,27 @@ fn main() {
     pm.add_pass(hercules_opt::pass::Pass::DCE);
     pm.add_pass(hercules_opt::pass::Pass::Predication);
     pm.add_pass(hercules_opt::pass::Pass::DCE);
-    let mut module = pm.run_passes();
-
-    let (def_uses, reverse_postorders, typing, subgraphs, doms, _postdoms, fork_join_maps) =
-        hercules_ir::verify::verify(&mut module)
-            .expect("PANIC: Failed to verify Hercules IR module.");
-    let plans: Vec<_> = module
-        .functions
-        .iter()
-        .enumerate()
-        .map(|(idx, function)| {
-            hercules_ir::schedule::default_plan(
-                function,
-                &reverse_postorders[idx],
-                &fork_join_maps[idx],
-                &hercules_cg::gcm::gcm(
-                    function,
-                    &def_uses[idx],
-                    &reverse_postorders[idx],
-                    &subgraphs[idx],
-                    &doms[idx],
-                    &fork_join_maps[idx],
-                    &hercules_cg::antideps::array_antideps(
-                        function,
-                        &def_uses[idx],
-                        &module.types,
-                        &typing[idx],
-                    ),
-                ),
-            )
-        })
-        .collect();
 
     if args.output.is_empty() {
-        hercules_ir::dot::xdot_module(
-            &module,
-            &reverse_postorders,
-            Some(&doms),
-            Some(&fork_join_maps),
-            Some(&plans),
-        );
+        pm.add_pass(hercules_opt::pass::Pass::Xdot(true));
+        pm.run_passes();
     } else {
         let mut file = File::create(args.output).expect("PANIC: Unable to open output file.");
         let mut contents = String::new();
+
+        pm.run_passes();
+        pm.make_reverse_postorders();
+        pm.make_doms();
+        pm.make_fork_join_maps();
+        pm.make_plans();
+
+        let reverse_postorders = pm.reverse_postorders.as_ref().unwrap().clone();
+        let doms = pm.doms.as_ref().unwrap().clone();
+        let fork_join_maps = pm.fork_join_maps.as_ref().unwrap().clone();
+        let plans = pm.plans.as_ref().unwrap().clone();
+
+        let module = pm.get_module();
+
         hercules_ir::dot::write_dot(
             &module,
             &reverse_postorders,