diff --git a/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_testing_automator.py b/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_testing_automator.py
index 5f5c28032d721dcf1e77ab52407a165c0251deb2..48320ca197a497f44d164a1366128fbeff2b7352 100644
--- a/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_testing_automator.py
+++ b/llvm/projects/hpvm-tensor-rt/code_autogenerators/benchmark_testing_automator.py
@@ -27,17 +27,37 @@ def parse_binary_output(proc_output):
     return avg_time
 
 
-# Input: a list of tuples of benchmark names
-# Can change to input a file containing benchmarks to run
-def run_benchmarks(builds_dir, output_filename, should_print_bin_output = True):
-    output_file = open(output_filename, "w")
+def get_sorted_binaries(builds_dir):
+    # Dict of network names to lists of binaries
+    # Each list is sorted by knob id later, when the benchmarks are run
+    network_bins = defaultdict(list)  # needs "from collections import defaultdict"
     for bin_name in os.listdir(builds_dir):
         if bin_name.find("profiling") == -1:
             continue
-        output_file.write("%s: %s\n" % (bin_name, \
+        network_name = bin_name[ : bin_name.rfind("_")]
+        network_bins[network_name].append(bin_name)
+    return network_bins
+
+# Input: a dict mapping each network name to a list of its profiling binaries
+# Can change to input a file containing benchmarks to run
+def run_benchmarks(sorted_bins, builds_dir, output_filename, should_print_bin_output = False):
+    def get_knob_id(bin_name):
+        return int(bin_name[bin_name.rfind("_") + 1 : ])
+
+    output_file = open(output_filename, "w", buffering = 0)  # unbuffered; Python 2 only (Python 3 text mode rejects buffering = 0)
+    for network_name in sorted_bins:
+        # Sort the binaries in order by knob id
+        sorted_bins[network_name].sort(key = get_knob_id)
+        print("--------------------------------------")
+        print(network_name)
+        # Go through all binaries
+        for bin_name in sorted_bins[network_name]:
+            print(bin_name)
+            output_file.write("%s results\n" % bin_name)  # placeholder; the actual run below is disabled for now
+            '''output_file.write("%s: %s\n" % (bin_name, \
                 parse_binary_output(run_benchmark(os.path.join(builds_dir, bin_name), \
-                should_print_bin_output))))
-        print(bin_name)
+                should_print_bin_output))))'''
+        print("--------------------------------------\n")
     output_file.close()
 
 
@@ -48,4 +68,5 @@ if __name__ == "__main__":
         print("Usage: python online_benchmark_testing_automator.py <builds dir> <outputs_file_name>")
         exit(1)
     print("Output file name: %s" % sys.argv[2])
-    run_benchmarks(sys.argv[1], sys.argv[2])
+    sorted_bins = get_sorted_binaries(sys.argv[1])
+    run_benchmarks(sorted_bins, sys.argv[1], sys.argv[2])
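
Note (not part of the patch): the grouping in get_sorted_binaries and the numeric sort in run_benchmarks assume binary names of the form <network>_profiling_<knob id>, as implied by the rfind("_") slicing above. A minimal standalone sketch of that behavior, using hypothetical binary names:

    from collections import defaultdict

    # Hypothetical binary names; the real ones come from os.listdir(builds_dir)
    bins = ["alexnet_profiling_2", "alexnet_profiling_10", "alexnet_profiling_1",
            "resnet18_profiling_3"]

    network_bins = defaultdict(list)
    for bin_name in bins:
        # Everything before the last "_" names the network, e.g. "alexnet_profiling"
        network_bins[bin_name[ : bin_name.rfind("_")]].append(bin_name)

    for network_name in network_bins:
        # Numeric sort on the trailing knob id, so "_10" sorts after "_2"
        network_bins[network_name].sort(key = lambda b: int(b[b.rfind("_") + 1 : ]))
        print(network_name, network_bins[network_name])

    # alexnet_profiling ['alexnet_profiling_1', 'alexnet_profiling_2', 'alexnet_profiling_10']
    # resnet18_profiling ['resnet18_profiling_3']

The int() conversion in get_knob_id matters: a plain string sort would order knob 10 before knob 2.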
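
A second note, on the unbuffered output file: open(output_filename, "w", buffering = 0) works under Python 2 but raises ValueError in Python 3, where unbuffered I/O is only allowed in binary mode. If this script is ever ported, line buffering gives the same "results appear as they are written" effect; a sketch with a hypothetical file name:

    # Python 3 sketch: buffering = 1 means line-buffered in text mode,
    # so each write ending in "\n" is flushed to disk immediately.
    output_file = open("profiling_results.txt", "w", buffering = 1)
    output_file.write("alexnet_profiling_1 results\n")  # flushed at the newline
    output_file.close()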