Commit ca5a8b0c authored by Hashim Sharif

Adding baseline configuration to autotuning outputs

parent 8137e1f7
@@ -60,9 +60,10 @@ def isLayer(layer_comp):
     return False
 
 def getOpMapping(op_name):
+    print ("op_mapping ", op_name, op_mapping)
     if op_name not in op_mapping:
         print ("ERROR: OP not found!! = ", op_name, "\n")
         sys.exit(0)
@@ -312,6 +313,8 @@ def generateBaselineConfig(layer_comp):
 
 def buildConfigStr(config, layer_desc):
     index = 1
@@ -389,14 +392,53 @@ def dumpConfig(layer_desc, config_arrs, result_dir):
         f.write("-----\n")
         it += 1
 
+def dumpBaseLineConfig(conf_id, perf_improv, energy_red, \
+                       baseline_acc, bench_layer_composition, f_out):
+
+    f_out.write("+++++\n")
+    f_out.write("conf" + str(conf_id) + " " + str(perf_improv) + " " + str(energy_red) + " " + \
+                str(baseline_acc) + " " + str(0) + "\n")
+    config_str = genFP32Config(bench_layer_composition)
+    f_out.write(config_str)
+    f_out.write("-----\n")
+
+
+def genFP32Config(layer_comp):
+
+    target = "gpu"
+    it = 1
+    config_str = ""
+
+    for layer in layer_comp:
+        config_str += str(it) + " "
+        config_str += target + " "
+        for op in layer:
+            op_name = getOpMapping(op)
+            config_str += str(op_name) + " fp32 1 "
+        config_str += "\n"
+        it += 1
+
+    config_str += str(it) + " " + target + " softmax fp32 1\n"
+
+    return config_str
 # ***** Exported Interface --- Generates file used by HPVM RT controller ******/
-def dumpDevConfigsToRTFile(configurations, config_out_path, bench_layer_composition):
+def dumpDevConfigsToRTFile(configurations, config_out_path, bench_layer_composition, baseline_acc):
 
     f = open(config_out_path, "w+")
-    it = 1
+    dumpBaseLineConfig(1, 1.0, 0, baseline_acc, bench_layer_composition, f)
+    it = 2
 
     for config in configurations:
         f.write("+++++\n")
         f.write("conf" + str(it) + " " + str(config.speedup) + " 0 " + \
......
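For reference, here is a minimal standalone sketch (not part of the commit) of the baseline entry that dumpBaseLineConfig and genFP32Config write into the config file consumed by the HPVM RT controller. The layer composition, the op_mapping contents, and the 84.5 accuracy below are hypothetical placeholders; the real values come from the benchmark's layer description and the tuner's measured baseline accuracy.

    # Sketch mirroring the genFP32Config loop and the dumpBaseLineConfig header line.
    # layer_comp, op_mapping and the 84.5 baseline accuracy are illustrative only.
    layer_comp = [["convolution", "add", "activation"], ["dense", "add"]]
    op_mapping = {"convolution": "conv", "add": "add",
                  "activation": "relu", "dense": "mul"}   # hypothetical mapping

    config_str = ""
    it = 1
    for layer in layer_comp:
        config_str += str(it) + " gpu "
        for op in layer:
            config_str += op_mapping[op] + " fp32 1 "
        config_str += "\n"
        it += 1
    config_str += str(it) + " gpu softmax fp32 1\n"

    print("+++++")
    print("conf1 1.0 0 84.5 0")   # conf_id perf_improv energy_red baseline_acc 0
    print(config_str, end="")
    print("-----")

Each layer line lists every op in the layer as "<op> fp32 1", i.e. the unmodified fp32 GPU path, and the trailing softmax line uses the final layer index. Because this conf1 baseline entry is written first, dumpDevConfigsToRTFile now starts numbering the tuned configurations at it = 2.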
@@ -19,7 +19,7 @@ class DevTimeTuner:
     def __init__(self, Bench):
         self.piped_execution = True
-        self.autotuner_runs = 500
+        self.autotuner_runs = 100
         self.promise_binary = Bench.promise_binary
@@ -110,15 +110,16 @@ class DevTimeTuner:
         sorted_configurations = sorted(configurations, key=lambda conf: conf.speedup)
-        best_conf = sorted_configurations[-1]
+        if len(sorted_configurations) > 0:
+            best_conf = sorted_configurations[-1]
-        conf_file.write("speedup = " + str(best_conf.speedup) + \
+            conf_file.write("speedup = " + str(best_conf.speedup) + \
                         " avg_loss = " + str(best_conf.avg_loss) + "\n")
-        for flag in best_conf.flags:
-            conf_file.write(str(flag) + "\n")
+            for flag in best_conf.flags:
+                conf_file.write(str(flag) + "\n")
-        conf_file.close()
+            conf_file.close()
@@ -136,7 +137,7 @@ class DevTimeTuner:
         buildRtConfig.adjustConfigLosses(sorted_configurations)
         buildRtConfig.dumpDevConfigsToRTFile(sorted_configurations, \
-                                             config_out_path, bench_layer_composition)
+                                             config_out_path, bench_layer_composition, self.gold_accuracy)
 
         plot_file_path = self.result_dir + "dev_all_conf_plot.png"
         genPlots.genScatterPlotFromConfigs(sorted_configurations, plot_file_path)
@@ -165,7 +166,7 @@ class DevTimeTuner:
         buildRtConfig.adjustConfigLosses(sorted_configurations)
         buildRtConfig.dumpDevConfigsToRTFile(sorted_configurations, \
-                                             config_out_path, bench_layer_composition)
+                                             config_out_path, bench_layer_composition, self.gold_accuracy)
 
         plot_file_path = self.result_dir + "dev_pareto_plot.png"
         genPlots.genScatterPlotFromConfigs(sorted_configurations, plot_file_path)
@@ -191,7 +192,7 @@ class DevTimeTuner:
         buildRtConfig.adjustConfigLosses(sorted_configurations)
         buildRtConfig.dumpDevConfigsToRTFile(sorted_configurations, \
-                                             config_out_path, bench_layer_composition)
+                                             config_out_path, bench_layer_composition, self.gold_accuracy)
 
         plot_file_path = self.result_dir + "true_pareto_plot.png"
         genPlots.genScatterPlotFromConfigs(sorted_configurations, plot_file_path)
......