diff --git a/llvm/projects/hpvm-tensor-rt/build_pldi/table_generator.py b/llvm/projects/hpvm-tensor-rt/build_pldi/table_generator.py
index 85895d7978dbc7ede8f805ca3053a483b9f7906a..e7177d1ca2d632a629178926f23a7827a1c71564 100644
--- a/llvm/projects/hpvm-tensor-rt/build_pldi/table_generator.py
+++ b/llvm/projects/hpvm-tensor-rt/build_pldi/table_generator.py
@@ -48,24 +48,11 @@ class TableGenerator:
         3. Writes the internal table to <network_name>_tensors.txt file and uses the 
         <network_name>_ops.txt file as a guideline in terms of row order 
 		'''
-        self.__run_inputted_binaries()
+        #self.__run_inputted_binaries()
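+        # the profiler is not (re)invoked here; the table is built from
+        # results files already present in the results directory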
         self.__build_internal_table()
         self.__output_table_to_file()
 
 
-    def __should_execute_file(self, file_path):
-        '''
-        Checks if the file at the given file path is a binary that should be run
-        by the profiler. Must exist, be a binary, and must start with the network
-        name as per our naming standards.
-
-        Args:
-            file_path:          Path of the file to check 
-        '''
-        return os.path.isfile(file_path) and os.access(file_path, os.X_OK) and \
-                file_path.find(self.__network_name) != -1
-
-
     def __run_inputted_binaries(self):
         '''
         Invokes the profiler to run all appropriate binaries (must start with the network 
@@ -99,63 +86,6 @@ class TableGenerator:
                         output_file]).communicate()
 
 
-    def __get_approximation_type(self, results_filename):
-        '''
-        Parses a given results filename for the approximation type. 
-        Format assumption: <network_name>_<approx_type>.txt
-            
-        Args:
-            results_filename:      Name of results file
-
-        Returns:
-            the approximation technique (ex: fp16) 
-        '''
-        approx_type_start_ind = results_filename.find(self.__network_name) \
-                + len(self.__network_name) + 1 # + 1 to account for _ delimiter
-        approx_type_end_ind = results_filename.find(".txt")
-        return results_filename[approx_type_start_ind : approx_type_end_ind] 
-   
-
-    def __parse_tensor_operation_line(self, tensor_op_line):
-        '''
-        Parses a tensor operation line (within a output file from the offline
-        profiler for the operation name, the total time used, and the total
-        energy used
-
-        Args:
-            tensor_op_line:        Tensor operation line from output file
-
-        Returns:
-            operation name
-            total time used
-            total energy used
-        '''
-        line_as_list = tensor_op_line.split(",")
-        return line_as_list[0], line_as_list[1], line_as_list[2] 
-
-
-    def __build_nested_default_dict(self):
-        '''
-        Builds a nested default dictionary with an arbitrary number of levels
-        '''
-        return defaultdict(self.__build_nested_default_dict)
-
-    def __get_original_operation_name(self, op_name):
-        '''
-        Parses an operation name containing _<conversion type> for the original
-        operation name.
-        Format assumption: <original_op_name>_<conversion type>
-
-        Args:
-            op_name:        Name of the operation
-        
-        Returns:
-            the original operation name 
-        '''
-        underscore_ind = op_name.find("_")
-        return op_name[ : underscore_ind], op_name[underscore_ind + 1 : ]
-
-
     def __build_internal_table(self):
         '''
         Iterates through each results file generated by the runs of the offline
@@ -180,13 +110,13 @@ class TableGenerator:
                     # Get the original operation name (without the f2h/h2f) and the conversion type 
                     orig_op_name, conversion_type = self.__get_original_operation_name(op_name)
 
-                    if orig_op_name not in self.__table: 
+                    if orig_op_name not in self.__table:
                         print("ERROR: Conversion found but original %s is not in the table" % orig_op_name)
                         exit(1)
 
                     # Store f2h and h2f as columns in the row belonging to the original operation
                     self.__table[orig_op_name][approx_type]["time"] = total_time
-                    self.__table[orig_op_name][approx_type]["energy"] = total_energy 
+                    self.__table[orig_op_name][approx_type]["energy"] = total_energy
 
                 # Create a new row in the dictionary
                 else:
@@ -206,7 +136,7 @@ class TableGenerator:
         time and the energy
         '''
         table_file_path = os.path.join(self.__results_dir_path, self.__table_filename)
-        soc_operations_file_name = os.path.join("home", "nvidia", "soc_simulator", \
+        soc_operations_file_name = os.path.join("/", "home", "nvidia", "soc_simulator", \
                         "%s_cifar10" % self.__network_name, "%s_ops.txt" % self.__network_name)
 
         soc_operations_file = open(soc_operations_file_name, "r")
@@ -217,7 +147,7 @@ class TableGenerator:
         while curr_line:
             # First line is always the layers line (#layer_name,num_ops)
             layer_name, num_ops = self.__parse_layer_info_line(curr_line)
-           
+
             # List of strings, where each string is a row corresponding to an operation
             # in the layer
             ops_in_layer = []
@@ -227,14 +157,14 @@ class TableGenerator:
             # CRITICAL ASSUMPTION: All operations within a layer have the same # columns
             # or everything breaks bc the header is per layer, not per operation
             header = ["**", layer_name, str(num_ops)]
-           
+
             # Iterate through all operations within the layer 
-            for op_in_layer_count in range(num_ops): 
+            for op_in_layer_count in range(num_ops):
                 # Contains the operation name 
                 curr_line = soc_operations_file.readline().strip()
 
                 # Stores a list of elements that will be joined to make up a row 
-                curr_op = [curr_line] 
+                curr_op = [curr_line]
                 operation_data = self.__table[curr_line]
 
                 # Iterate through time/energy data for each approximation type corresponding
@@ -247,7 +177,7 @@ class TableGenerator:
                     curr_op.append(op_energy)
 
                     if op_in_layer_count == 0:
-                        header.append("%s_time" % approx_type)    
+                        header.append("%s_time" % approx_type)
                         header.append("%s_energy" % approx_type)
 
                 ops_in_layer.append(' '.join(curr_op))
@@ -258,6 +188,77 @@ class TableGenerator:
 
             curr_line = soc_operations_file.readline().strip()
 
+
+    def __should_execute_file(self, file_path):
+        '''
+        Checks whether the file at the given path is a binary that the profiler
+        should run. It must exist, be executable, and contain the network name
+        (per our naming standards, such binaries start with the network name).
+
+        Args:
+            file_path:          Path of the file to check
+        '''
+        return os.path.isfile(file_path) and os.access(file_path, os.X_OK) and \
+                file_path.find(self.__network_name) != -1
+
+
+    def __get_approximation_type(self, results_filename):
+        '''
+        Parses a given results filename for the approximation type.
+        Format assumption: <network_name>_<approx_type>.txt
+
+        Args:
+            results_filename:      Name of results file
+
+        Returns:
+            the approximation technique (ex: fp16)
+        '''
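+        # e.g. "<network_name>_fp16.txt" -> "fp16": take the substring between
+        # the "_" after the network name and the ".txt" extension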
+        approx_type_start_ind = results_filename.find(self.__network_name) \
+                + len(self.__network_name) + 1 # + 1 to account for _ delimiter
+        approx_type_end_ind = results_filename.find(".txt")
+        return results_filename[approx_type_start_ind : approx_type_end_ind]
+
+
+    def __parse_tensor_operation_line(self, tensor_op_line):
+        '''
+        Parses a tensor operation line (within an output file from the offline
+        profiler) for the operation name, the total time used, and the total
+        energy used.
+
+        Args:
+            tensor_op_line:        Tensor operation line from output file
+
+        Returns:
+            operation name
+            total time used
+            total energy used
+        '''
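+        # expected line format: <op_name>,<total_time>,<total_energy>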
+        line_as_list = tensor_op_line.split(",")
+        return line_as_list[0], line_as_list[1], line_as_list[2]
+
+
+    def __build_nested_default_dict(self):
+        '''
+        Builds a nested default dictionary with an arbitrary number of levels
+        '''
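+        # a missing key calls this factory again, yielding another nested
+        # defaultdict, so lookups/assignments work at arbitrary depth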
+        return defaultdict(self.__build_nested_default_dict)
+
+
+    def __get_original_operation_name(self, op_name):
+        '''
+        Parses an operation name containing _<conversion type> for the original
+        operation name.
+        Format assumption: <original_op_name>_<conversion type>
+
+        Args:
+            op_name:        Name of the operation
+
+        Returns:
+            the original operation name
+            the conversion type
+        '''
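+        # split at the first "_" into (original op name, conversion type),
+        # e.g. "<op_name>_f2h" -> ("<op_name>", "f2h")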
+        underscore_ind = op_name.find("_")
+        return op_name[ : underscore_ind], op_name[underscore_ind + 1 : ]
+
+
     def __parse_layer_info_line(self, layer_info_line): #layer_name,num_ops
         '''
         Parses a layer header (from the original ops.txt file) into the layer name