Commit ac6d7146 authored by Elizabeth

Started adding pydoc

parent f24a8d81
@@ -6,59 +6,102 @@ import shutil
 from collections import defaultdict
 
 class TableGenerator:
+    '''
+    Stores all precision conversions used.
+    '''
     precision_conversions = frozenset(["h2f", "f2h"])
 
-    def __init__(self, dir_name, iters, profiler_binary_name):
-        self.__dir_name = dir_name
+    def __init__(self, dir_path, iters, profiler_binary_name):
+        '''
+        Args:
+            dir_path: Path of directory containing network binaries
+            iters: Number of iterations to run each binary for
+            profiler_binary_name: Name of offline profiler binary to run
+        '''
+        self.__dir_path = dir_path
 
-        # directory/path/network_name (last item in path)
-        self.__network_name = os.path.split(dir_name)[-1]
+        # Name of the actual directory
+        self.__network_name = os.path.split(dir_path)[-1]
 
         self.__iters = iters
         self.__profiler_binary_name = profiler_binary_name
-        self.__results_dir_name = "%s_results" % self.__dir_name
+
+        # Path to results directory
+        self.__results_dir_path = "%s_results" % self.__dir_path
+
+        # Outputted table file
         self.__table_filename = "%s_tensors.txt" % self.__network_name
 
-    def __is_binary(self, file_path):
-        # Binary name must start with the network name as per our naming standards
+        # Nested default dictionary of default dicts
+        self.__table = self.__build_nested_default_dict()
+
+    def generate_table(self):
+        '''
+        Generates a table file called <network_name>_tensors.txt in the
+        results directory.
+        '''
+        self.__build_internal_table()
+        self.__output_table()
+
+    def __should_execute_file(self, file_path):
+        '''
+        Checks if the file at the given file path is a binary that should be run
+        by the profiler. Must exist, be a binary, and must start with the network
+        name as per our naming standards.
+        Args:
+            file_path: Path of the file to check
+        '''
         return os.path.isfile(file_path) and os.access(file_path, os.X_OK) and \
                 file_path.find(self.__network_name) != -1
 
-    def run_binaries_in_input_dir(self):
-        if not os.path.isdir(self.__dir_name):
-            print("ERROR: Directory %s not found" % self.__dir_name)
+    def run_inputted_binaries(self):
+        '''
+        Invokes the profiler to run all appropriate binaries (must start with the network
+        name) in the inputted directory. Result files generated by the profiler are
+        stored in the results directory and are named <binary_name>.txt. These results
+        files are then parsed in a later step to generate the table.
+        '''
+        if not os.path.isdir(self.__dir_path):
+            print("ERROR: Directory %s not found" % self.__dir_path)
             exit(1)
 
         try:
-            os.mkdir(self.__results_dir_name)
+            os.mkdir(self.__results_dir_path)
         except OSError:
-            if os.path.isdir(self.__results_dir_name):
+            if os.path.isdir(self.__results_dir_path):
                 print("Directory already exists. Clearing directory.")
-                for old_file in glob.glob(os.path.join(self.__results_dir_name, "*")):
+                for old_file in glob.glob(os.path.join(self.__results_dir_path, "*")):
                     os.remove(old_file)
             else:
                 print("ERROR: Directory doesn't exist but failed to create dir")
 
-        for binary_name in os.listdir(self.__dir_name):
-            binary_path = os.path.join(self.__dir_name, binary_name)
+        for binary_name in os.listdir(self.__dir_path):
+            binary_path = os.path.join(self.__dir_path, binary_name)
 
-            if not self.__is_binary(binary_path):
+            if not self.__should_execute_file(binary_path):
                 continue
 
             if not os.path.isfile(binary_path):
                 print("ERROR: Binary %s not found" % binary_path)
                 exit(1)
 
-            output_file = os.path.join(self.__results_dir_name, binary_name + ".txt")
+            output_file = os.path.join(self.__results_dir_path, binary_name + ".txt")
 
             # No stdout/stderr piping needed for now
             subprocess.Popen([profiler_binary_name, binary_path, str(self.__iters), \
                         output_file]).communicate()
 
     def __get_approximation_type(self, results_filename):
+        '''
+        Parses a given results filename for the approximation type.
+        Format assumption: <network_name>_<approx_type>.txt
+        Args:
+            results_filename: Name of results file
+        Returns:
+            the approximation technique (ex: fp16)
+        '''
         approx_type_start_ind = results_filename.find(self.__network_name) \
                 + len(self.__network_name) + 1 # + 1 to account for _ delimiter
         approx_type_end_ind = results_filename.find(".txt")
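
For illustration, here is how that index arithmetic plays out on a hypothetical results filename following the <network_name>_<approx_type>.txt convention (the "mobilenet"/"fp16" values are made-up examples, not outputs from this commit):

    results_filename = "mobilenet_fp16.txt"                              # hypothetical example
    network_name = "mobilenet"
    start = results_filename.find(network_name) + len(network_name) + 1  # skip the "_" delimiter
    end = results_filename.find(".txt")
    assert results_filename[start:end] == "fp16"                         # the approximation type
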
@@ -66,34 +109,54 @@ class TableGenerator:
     def __parse_tensor_operation_line(self, tensor_op_line):
-        print(tensor_op_line)
+        '''
+        Parses a tensor operation line (within an output file from the offline
+        profiler) for the operation name, the total time used, and the total
+        energy used.
+        Args:
+            tensor_op_line: Tensor operation line from output file
+        Returns:
+            operation name
+            total time used
+            total energy used
+        '''
         line_as_list = tensor_op_line.split(",")
         return line_as_list[0], line_as_list[1], line_as_list[2]
 
     def __build_nested_default_dict(self):
+        '''
+        Builds a nested default dictionary with an arbitrary number of levels
+        '''
         return defaultdict(self.__build_nested_default_dict)
 
-    # h2f or f2h
     def __get_original_operation_name(self, op_name):
+        '''
+        Parses an operation name containing _<conversion type> for the original
+        operation name.
+        Format assumption: <original_op_name>_<conversion type>
+        Args:
+            op_name: Name of the operation
+        Returns:
+            the original operation name and the conversion type
+        '''
         underscore_ind = op_name.find("_")
         return op_name[ : underscore_ind], op_name[underscore_ind + 1 : ]
 
-    def generate_table(self):
-        self.__table = self.__build_nested_default_dict()
-        self.__build_internal_table()
-        self.__output_table()
     def __build_internal_table(self):
-        for results_file_name in os.listdir(self.__results_dir_name):
+        for results_file_name in os.listdir(self.__results_dir_path):
             # Ignore if it's not a results file
             if results_file_name == self.__table_filename or \
                 not results_file_name.startswith(self.__network_name):
                 continue
 
             approx_type = self.__get_approximation_type(results_file_name)
-            results_file = open(os.path.join(self.__results_dir_name, results_file_name), "r")
+            results_file = open(os.path.join(self.__results_dir_path, results_file_name), "r")
 
             for line in results_file:
                 line = line.strip()
@@ -122,7 +185,7 @@ class TableGenerator:
             results_file.close()
 
     def __output_table(self):
-        table_file_path = os.path.join(self.__results_dir_name, self.__table_filename)
+        table_file_path = os.path.join(self.__results_dir_path, self.__table_filename)
 
         # TODO un hard code this
         soc_operations_file_name = os.path.join("/home/nvidia/soc_simulator", "%s_cifar10" % self.__network_name, "%s_ops.txt" % self.__network_name)
@@ -184,9 +247,9 @@ class TableGenerator:
         # Try doing this per layer first
         pass
 
-binary_dir_name = "/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/build_pldi/mobilenet"
+binary_dir_path = "/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/build_pldi/mobilenet"
 num_iters = 1
 profiler_binary_name = "/home/nvidia/awesome_profiler/pp"
-table_gen = TableGenerator(binary_dir_name, num_iters, profiler_binary_name)
-#table_gen.run_binaries_in_input_dir()
-table_gen.generate_table()
+table_gen = TableGenerator(binary_dir_path, num_iters, profiler_binary_name)
+#table_gen.run_inputted_binaries()
+#table_gen.generate_table()
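
As a rough sketch of how the parsing helpers above fit together: each results file is expected to hold comma-separated tensor-operation lines, and operation names may carry an _h2f or _f2h suffix for precision conversions. The operation name and numbers below are made up for illustration; only the comma-separated line format and the suffix convention come from the code above.

    # Hypothetical results line in the "<op_name>,<total_time>,<total_energy>" format
    tensor_op_line = "Conv1_h2f,0.12,3.4"

    # What __parse_tensor_operation_line would return
    op_name, total_time, total_energy = tensor_op_line.split(",")[:3]

    # What __get_original_operation_name would return for that operation name
    underscore_ind = op_name.find("_")
    orig_name, conversion = op_name[:underscore_ind], op_name[underscore_ind + 1:]
    # orig_name == "Conv1", conversion == "h2f" (one of the precision_conversions)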