Commit 830e83d0 authored by Hashim Sharif

Using 2 runs in the tuning phase and min_accuracy as threshold

parent 4ee6139f
@@ -83,7 +83,23 @@ def getConfidence(accuracy_outfile, acc_threshold):
 
   return conf, avg_acc
 
+
+def getMinAccuracy(accuracy_outfile):
+
+  f = open(accuracy_outfile, "r")
+
+  total_acc = 0.0
+  failed = 0
+  it = 0
+  acc_list = []
+  for x in f:
+    acc = float(x.strip())
+    acc_list.append(acc)
+
+  return min(acc_list)
+
+
 # NOTE: invokes the binary with the number of runs
 def do_multiple_runs2(binary_name, accuracy_threshold, confidence_threshold):
...
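As committed, getMinAccuracy never closes its file handle and carries unused accumulators (total_acc, failed, it) that appear copied over from getConfidence. A functionally equivalent sketch, with the dead state removed and the file managed by a with block:

def getMinAccuracy(accuracy_outfile):
  # Read one accuracy value per line and return the worst run
  with open(accuracy_outfile, "r") as f:
    acc_list = [float(line.strip()) for line in f if line.strip()]
  return min(acc_list)

The blank-line guard is an added assumption; the committed version raises ValueError if the file ends with an empty line.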
@@ -5,6 +5,9 @@ import shutil
 
 from measure_confidence2 import getConfigCost
 
+AL_THRESHOLD = 0.1
+
+
 class Config:
 
   def __init__(self):
     self.avg_accuracy = 0
@@ -69,10 +72,6 @@ def loadConfigData(result_dir, layer_costs, baseline_accuracy):
 
-AL_THRESHOLD = 0.1
-SPEEDUP_BAND_SIZE = 0.3
-ENERGY_BAND_SIZE = 10
-
 class Configuration:
 
   def __init__(self, name, speedup, energy, accuracy, accuracy_loss):
@@ -223,11 +222,17 @@ def findParetoConfigs(base_dir, layer_costs, accuracy):
 
     config = Configuration(config.fname, config.speedup, 100, config.avg_accuracy, config.avg_loss)
     config_list.append(config)
 
-  if len(config_list) < 30:
-    SPEEDUP_BAND_SIZE = 1.2
+  SPEEDUP_BAND_SIZE = 1.0
+  ENERGY_BAND_SIZE = 10
+
+  # No Pareto Selection if list is < 50 configurations
+  if len(config_list) < 50:
+    SPEEDUP_BAND_SIZE = 100  # Include all in Pareto Frontier
+
+  print ("*SPEEDUP_BAND_SIZE = ", SPEEDUP_BAND_SIZE)
 
   ASC, AEC = compute_pareto_points_with_margin(config_list, SPEEDUP_BAND_SIZE, ENERGY_BAND_SIZE)
...
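The band constants feed compute_pareto_points_with_margin (not shown in this diff), which keeps configurations that fall within a margin of the exact Pareto frontier; raising SPEEDUP_BAND_SIZE to 100 for small config lists effectively disables the filtering. A rough sketch of the banded-frontier idea, assuming each configuration exposes speedup and accuracy_loss attributes (pareto_with_margin is a hypothetical stand-in, not the tuner's implementation):

def pareto_with_margin(configs, speedup_band):
  # Keep a config if its speedup is within speedup_band of the best
  # speedup achieved at an equal-or-lower accuracy loss
  kept = []
  for c in configs:
    best = max(o.speedup for o in configs
               if o.accuracy_loss <= c.accuracy_loss)
    if c.speedup >= best - speedup_band:
      kept.append(c)
  return kept

With speedup_band = 100, no plausible speedup gap exceeds the band, so every configuration survives, matching the "Include all in Pareto Frontier" comment.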
@@ -22,6 +22,7 @@ import threading
 
 import psutil
 from measure_confidence2 import dump_promise_confidence_files3
+from measure_confidence2 import getConfidence, getMinAccuracy
 from select_top_results import select_top_results
 from time import sleep
 from pareto_curve import findParetoConfigs
@@ -169,25 +170,36 @@ class ClangFlagsTuner(MeasurementInterface):
 
     createFlagsFile("promise_flags", cfg)
 
     run_cmd = binary_name
-    print "binary_name = ", run_cmd
+    print "\nbinary_name = ", run_cmd
 
     #run_result_call_program = self.call_program(run_cmd)
+    #print "returned \n\n"
+
+    total_runs = 2
 
     FNULL = open(os.devnull, 'wb')
-    p = subprocess.Popen(run_cmd, stdout = FNULL)
+    #p = subprocess.Popen(run_cmd, stdout = FNULL)
+    p = subprocess.Popen([run_cmd, str(total_runs)], stdout = FNULL)
     p.wait()
 
     accuracy = getAccuracy("final_accuracy")
 
+    # Get Confidence for multiple runs
+    conf, avg_acc = getConfidence("run_accuracies.txt", accuracy_threshold)
+
     # getConfigCost returns the cost associated with the selected configuration
     total_comps = getConfigCost(cfg)
 
     Result = opentuner.resultsdb.models.Result()
     Result.time = total_comps
-    Result.accuracy = accuracy
+    #Result.accuracy = accuracy
+
+    min_accuracy = getMinAccuracy("run_accuracies.txt")
+    print ("min_accuracy = ", min_accuracy)
+
+    Result.accuracy = min_accuracy
 
-    if accuracy > accuracy_threshold:
+    # Only pass conf if conf == 100
+    if min_accuracy > accuracy_threshold and conf == 100:
+      print ("conf = ", conf, " avg_acc = ", avg_acc)
+
       #if accuracy not in evaluated_configs:
       config_tuple = (total_comps, accuracy, cfg)
       self.configs_list.append(config_tuple)
@@ -199,8 +211,6 @@ class ClangFlagsTuner(MeasurementInterface):
 
       f_acc.close()
 
-    print "done with one run"
-
     test_id += 1
 
     return Result
...
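Taken together, the tuner changes make each candidate evaluation pessimistic: the binary is invoked with total_runs = 2, per-run accuracies are written to run_accuracies.txt, and OpenTuner is given the minimum accuracy across runs rather than a single run's value. A condensed sketch of the new accept/reject test (accept_config is a hypothetical name; the real logic is inlined in ClangFlagsTuner):

def accept_config(accuracy_threshold):
  # conf is the percentage of runs that individually cleared the
  # threshold; min_accuracy is the worst single run
  conf, avg_acc = getConfidence("run_accuracies.txt", accuracy_threshold)
  min_accuracy = getMinAccuracy("run_accuracies.txt")
  return min_accuracy > accuracy_threshold and conf == 100

Requiring conf == 100 on top of min_accuracy > accuracy_threshold is redundant only if getConfidence applies the same strict comparison per run; keeping both checks is the safe reading of the commit.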