Skip to content
Snippets Groups Projects
Commit 207508c3 authored by Hashim Sharif's avatar Hashim Sharif
Browse files

Updating Skip Layer Selection to incorporate Op Cost

parent 8faad984
No related branches found
No related tags found
No related merge requests found
......@@ -37,9 +37,9 @@ Alexnet1.skip_layers = 0
Alexnet1.skip_layer_str = "5_0"
Alexnet1.base_dir = "../build_tuner/tuner_results/alexnet_cifar10/"
Alexnet1.result_dir_1 = "../build_tuner/tuner_results/alexnet_cifar10/loss_1/batch11"
Alexnet1.result_dir_2 = "../build_tuner/tuner_results/alexnet_cifar10/loss_2/batch11"
Alexnet1.result_dir_3 = "../build_tuner/tuner_results/alexnet_cifar10/loss_3/batch11"
Alexnet1.result_dir_1 = "../build_tuner/tuner_results/alexnet_cifar10/loss_1/batch12"
Alexnet1.result_dir_2 = "../build_tuner/tuner_results/alexnet_cifar10/loss_2/batch12"
Alexnet1.result_dir_3 = "../build_tuner/tuner_results/alexnet_cifar10/loss_3/batch12"
Alexnet1.tensor_desc_file = "tuner_results/alexnet_cifar10/alexnet_tensors.txt"
Alexnet1.layer_file = "tuner_results/alexnet_cifar10/alexnet_layers.txt"
......@@ -52,7 +52,7 @@ Alexnet1.loss2_result_file = "tuner_results/alexnet_cifar10/loss_2/promise_tuned
Alexnet1.autotuner_runs = 1000
Alexnet1.tuner_accuracy = 79.9
Alexnet1.promise_accuracy = 79.9
Alexnet1.validation_accuracy = 79.16
Alexnet1.validation_accuracy = 79.19
bench_tuner_data["alexnet_cifar10"] = Alexnet1
......@@ -70,9 +70,9 @@ Alexnet2.start_promise_range = 3
Alexnet2.skip_layer_str = "6_1_0"
Alexnet2.base_dir = "../build_tuner/tuner_results/alexnet2_cifar10/"
Alexnet2.result_dir_1 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_1/batch11"
Alexnet2.result_dir_2 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_2/batch11"
Alexnet2.result_dir_3 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_3/batch11"
Alexnet2.result_dir_1 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_1/batch12"
Alexnet2.result_dir_2 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_2/batch12"
Alexnet2.result_dir_3 = "../build_tuner/tuner_results/alexnet2_cifar10/loss_3/batch12"
Alexnet2.tensor_desc_file = "tuner_results/alexnet2_cifar10/alexnet2_tensors.txt"
Alexnet2.layer_file = "tuner_results/alexnet2_cifar10/alexnet2_layers.txt"
Alexnet2.cost_file = "../build_tuner/tuner_results/alexnet2_cifar10/op_cost.txt"
......@@ -81,7 +81,7 @@ Alexnet2.cost_file = "../build_tuner/tuner_results/alexnet2_cifar10/op_cost.txt"
Alexnet2.autotuner_runs = 1000
Alexnet2.tuner_accuracy = 84.19
Alexnet2.promise_accuracy = 84.19
Alexnet2.validation_accuracy = 85.09
Alexnet2.validation_accuracy = 85.15
bench_tuner_data["alexnet2_cifar10"] = Alexnet2
......@@ -100,9 +100,9 @@ Alexnet3.start_promise_range = 3
Alexnet3.skip_layer_str = "14_3_4_1_6"
Alexnet3.base_dir = "../build_tuner/tuner_results/vgg16_cifar10/"
Alexnet3.result_dir_1 = "../build_tuner/tuner_results/vgg16_cifar10/loss_1/batch11"
Alexnet3.result_dir_2 = "../build_tuner/tuner_results/vgg16_cifar10/loss_2/batch11"
Alexnet3.result_dir_3 = "../build_tuner/tuner_results/vgg16_cifar10/loss_3/batch11"
Alexnet3.result_dir_1 = "../build_tuner/tuner_results/vgg16_cifar10/loss_1/batch12"
Alexnet3.result_dir_2 = "../build_tuner/tuner_results/vgg16_cifar10/loss_2/batch12"
Alexnet3.result_dir_3 = "../build_tuner/tuner_results/vgg16_cifar10/loss_3/batch12"
Alexnet3.tensor_desc_file = "tuner_results/vgg16_cifar10/vgg16_tensors.txt"
Alexnet3.layer_file = "tuner_results/vgg16_cifar10/vgg16_layers.txt"
......@@ -114,7 +114,7 @@ Alexnet3.loss2_result_file = "tuner_results/vgg16_cifar10/loss_2/promise_tuned_c
Alexnet3.autotuner_runs = 1000
Alexnet3.tuner_accuracy = 90.19
Alexnet3.promise_accuracy = 90.19
Alexnet3.validation_accuracy = 89.41
Alexnet3.validation_accuracy = 89.05
bench_tuner_data["vgg16_cifar10"] = Alexnet3
......@@ -132,9 +132,9 @@ Alexnet4.start_promise_range = 5
#Alexnet4.skip_layer_str = "0"
Alexnet4.skip_layer_str = "0_1_2_14_15_17_18_21"
Alexnet4.base_dir = "../build_tuner/tuner_results/resnet18_cifar10/"
Alexnet4.result_dir_1 = "../build_tuner/tuner_results/resnet18_cifar10/loss_1/batch11"
Alexnet4.result_dir_2 = "../build_tuner/tuner_results/resnet18_cifar10/loss_2/batch11"
Alexnet4.result_dir_3 = "../build_tuner/tuner_results/resnet18_cifar10/loss_3/batch11"
Alexnet4.result_dir_1 = "../build_tuner/tuner_results/resnet18_cifar10/loss_1/batch12"
Alexnet4.result_dir_2 = "../build_tuner/tuner_results/resnet18_cifar10/loss_2/batch12"
Alexnet4.result_dir_3 = "../build_tuner/tuner_results/resnet18_cifar10/loss_3/batch12"
Alexnet4.tensor_desc_file = "tuner_results/resnet18_cifar10/resnet_tensors.txt"
Alexnet4.layer_file = "tuner_results/resnet18_cifar10/resnet_layers.txt"
Alexnet4.cost_file = "../build_tuner/tuner_results/resnet18_cifar10/op_cost.txt"
......@@ -145,7 +145,7 @@ Alexnet4.loss2_result_file = "tuner_results/resnet18_cifar10/loss_2/promise_tune
Alexnet4.autotuner_runs = 1000
Alexnet4.tuner_accuracy = 89.6
Alexnet4.promise_accuracy = 89.59
Alexnet4.validation_accuracy = 89.44
Alexnet4.validation_accuracy = 89.65
bench_tuner_data["resnet18_cifar10"] = Alexnet4
......@@ -165,9 +165,9 @@ Alexnet5.start_promise_range = 3
#Alexnet5.skip_layer_str = "0"
Alexnet5.skip_layer_str = "0_1_2_3_4"
Alexnet5.base_dir = "../build_tuner/tuner_results/vgg16_cifar100/"
Alexnet5.result_dir_1 = "../build_tuner/tuner_results/vgg16_cifar100/loss_1/batch11"
Alexnet5.result_dir_2 = "../build_tuner/tuner_results/vgg16_cifar100/loss_2/batch11"
Alexnet5.result_dir_3 = "../build_tuner/tuner_results/vgg16_cifar100/loss_3/batch11"
Alexnet5.result_dir_1 = "../build_tuner/tuner_results/vgg16_cifar100/loss_1/batch12"
Alexnet5.result_dir_2 = "../build_tuner/tuner_results/vgg16_cifar100/loss_2/batch12"
Alexnet5.result_dir_3 = "../build_tuner/tuner_results/vgg16_cifar100/loss_3/batch12"
Alexnet5.tensor_desc_file = "../build_tuner/tuner_results/vgg16_cifar100/vgg16_tensors.txt"
Alexnet5.layer_file = "../build_tuner/tuner_results/vgg16_cifar100/vgg16_layers.txt"
......@@ -178,7 +178,7 @@ Alexnet5.loss2_result_file = "tuner_results/vgg_cifar100/loss_2/promise_tuned_co
Alexnet5.autotuner_runs = 1000
Alexnet5.tuner_accuracy = 67.95
Alexnet5.promise_accuracy = 66.8
Alexnet5.validation_accuracy = 66.15
Alexnet5.validation_accuracy = 68.65
bench_tuner_data["vgg16_cifar100"] = Alexnet5
......@@ -197,9 +197,9 @@ Alexnet6.start_promise_range = 1
Alexnet6.skip_layer_str = "0"
Alexnet6.base_dir = "../build_tuner/tuner_results/lenet_keras/"
Alexnet6.result_dir_1 = "../build_tuner/tuner_results/lenet_keras/loss_1/batch4"
Alexnet6.result_dir_2 = "../build_tuner/tuner_results/lenet_keras/loss_2/batch4"
Alexnet6.result_dir_3 = "../build_tuner/tuner_results/lenet_keras/loss_3/batch4"
Alexnet6.result_dir_1 = "../build_tuner/tuner_results/lenet_keras/loss_1/batch12"
Alexnet6.result_dir_2 = "../build_tuner/tuner_results/lenet_keras/loss_2/batch12"
Alexnet6.result_dir_3 = "../build_tuner/tuner_results/lenet_keras/loss_3/batch12"
Alexnet6.tensor_desc_file = "tuner_results/lenet_keras/lenet_tensors.txt"
Alexnet6.layer_file = "tuner_results/lenet_keras/lenet_layers.txt"
......@@ -230,9 +230,9 @@ Alexnet7.start_promise_range = 3
#Alexnet7.skip_layer_str = "0"
Alexnet7.skip_layer_str = "1_14_0_6_2"
Alexnet7.base_dir = "../build_tuner/tuner_results/mobilenet/"
Alexnet7.result_dir_1 = "../build_tuner/tuner_results/mobilenet/loss_1/batch11"
Alexnet7.result_dir_2 = "../build_tuner/tuner_results/mobilenet/loss_2/batch11"
Alexnet7.result_dir_3 = "../build_tuner/tuner_results/mobilenet/loss_3/batch11"
Alexnet7.result_dir_1 = "../build_tuner/tuner_results/mobilenet/loss_1/batch12"
Alexnet7.result_dir_2 = "../build_tuner/tuner_results/mobilenet/loss_2/batch12"
Alexnet7.result_dir_3 = "../build_tuner/tuner_results/mobilenet/loss_3/batch12"
Alexnet7.tensor_desc_file = "tuner_results/mobilenet/mobilenet_ops.txt"
Alexnet7.layer_file = "tuner_results/mobilenet/mobilenet_layer_comp.txt"
......@@ -244,7 +244,7 @@ Alexnet7.loss2_result_file = "tuner_results/mobilenet/loss_2/batch1/promise_tune
Alexnet7.autotuner_runs = 1000
Alexnet7.tuner_accuracy = 84.8
Alexnet7.promise_accuracy = 84.8
Alexnet7.validation_accuracy = 83.6
Alexnet7.validation_accuracy = 84.4
bench_tuner_data["mobilenet_cifar10"] = Alexnet7
......@@ -264,9 +264,9 @@ Alexnet8.skip_layer_str = "7_0_1"
Alexnet8.base_dir = "../build_tuner/tuner_results/mobilenet_shallow/"
#Alexnet8.result_dir_3 = "../build_tuner/tuner_results/mobilenet_shallow/loss_3/batch7"
#Alexnet8.result_dir_1 = "../build_tuner/tuner_results/mobilenet_shallow/loss_1/batch10"
Alexnet8.result_dir_1 = "../build_tuner/tuner_results/mobilenet_shallow/loss_1/batch11"
Alexnet8.result_dir_2 = "../build_tuner/tuner_results/mobilenet_shallow/loss_2/batch11"
Alexnet8.result_dir_3 = "../build_tuner/tuner_results/mobilenet_shallow/loss_3/batch11"
Alexnet8.result_dir_1 = "../build_tuner/tuner_results/mobilenet_shallow/loss_1/batch12"
Alexnet8.result_dir_2 = "../build_tuner/tuner_results/mobilenet_shallow/loss_2/batch12"
Alexnet8.result_dir_3 = "../build_tuner/tuner_results/mobilenet_shallow/loss_3/batch12"
Alexnet8.tensor_desc_file = "../build_tuner/tuner_results/mobilenet_shallow/mobilenet_shallow_ops.txt"
Alexnet8.layer_file = "../build_tuner/tuner_results/mobilenet_shallow/mobilenet_shallow_layer_comp.txt"
......@@ -278,7 +278,7 @@ Alexnet8.loss2_result_file = "../build_tuner/tuner_results/mobilenet_shallow/los
Alexnet8.autotuner_runs = 1000
Alexnet8.tuner_accuracy = 87.6
Alexnet8.promise_accuracy = 87.59
Alexnet8.validation_accuracy = 88.2
Alexnet8.validation_accuracy = 88.5
bench_tuner_data["mobilenet_shallow"] = Alexnet8
......
......@@ -3,6 +3,9 @@
import subprocess
import os
import operator
from benchmarks import bench_tuner_data
from swing_selection import loadLayerDesc
import math
def constructTunerFile(num_flags, tensor_id, error_level, default_error):
......@@ -21,7 +24,7 @@ def constructTunerFile(num_flags, tensor_id, error_level, default_error):
def runAndTestError(binary_name, gold_acc):
num_runs = 20
num_runs = 10
binary_name = "./" + binary_name
FNULL = open(os.devnull, 'wb')
......@@ -41,6 +44,15 @@ def runAndTestError(binary_name, gold_acc):
def roundDecimal(val):
    """Truncate *val* toward zero to at most 4 decimal places.

    Despite the name this truncates (int() drops the fraction); it does
    not round to nearest.
    """
    return int(val * 10000) / 10000.0
def test_sensitivity(Bench):
......@@ -54,7 +66,7 @@ def test_sensitivity(Bench):
for error_level in error_levels:
constructTunerFile(num_flags, tensor_id, error_level, 0)
error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
print (tensor_id, error_level, error)
#print (tensor_id, error_level, error)
total_error += error
avg_error = total_error / len(error_levels)
......@@ -64,7 +76,7 @@ def test_sensitivity(Bench):
print ("\n\n*** Per-Tensor Avg Errors \n\n")
f_name = Bench.base_dir + "/tensor_errors_1000.txt"
f_name = Bench.base_dir + "/tensor_errors_multiple.txt"
f = open(f_name, "w+")
for i in range(len(tensor_errors)):
print (i, tensor_errors[i][1])
......@@ -91,7 +103,7 @@ def test_sensitivity2(Bench):
num_flags = Bench.num_flags
constructTunerFile(num_flags, 0, 3, 3)
constructTunerFile(num_flags, 0, 6, 6)
error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
ref_acc = Bench.tuner_accuracy - error
......@@ -105,7 +117,7 @@ def test_sensitivity2(Bench):
for tensor_id in range(num_flags):
total_error = 0
for error_level in error_levels:
constructTunerFile(num_flags, tensor_id, error_level, 3)
constructTunerFile(num_flags, tensor_id, error_level, 6)
error = runAndTestError(Bench.tuner_binary, ref_acc)
print (tensor_id, error_level, error)
total_error += error
......@@ -140,6 +152,223 @@ def test_sensitivity2(Bench):
def test_sensitivity3(Bench):
    """Measure per-tensor error sensitivity at several error levels.

    For each tensor flag, perturbs only that tensor at each level in
    ``error_levels`` (all other flags at 0), runs the tuner binary, and
    records the resulting error. Writes one row per tensor to
    ``<base_dir>/tensor_errors_multiple.txt``:
    ``<tensor_id>\\t<err_0>\\t...<err_k>\\n``.
    """
    tensor_errors = []
    error_levels = [2, 5, 8, 11, 14, 17]
    num_flags = Bench.num_flags

    for tensor_id in range(num_flags):
        errors = []
        for error_level in error_levels:
            # Only tensor_id is perturbed; default error for all others is 0.
            constructTunerFile(num_flags, tensor_id, error_level, 0)
            error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
            print (tensor_id, error_level, error)
            errors.append(error)
        tensor_errors.append([tensor_id, errors])

    print ("\n\n*** Per-Tensor Avg Errors \n\n")
    f_name = Bench.base_dir + "/tensor_errors_multiple.txt"
    # 'with' guarantees the file is closed even if a write raises
    # (original opened and closed manually, leaking on error).
    with open(f_name, "w+") as f:
        for i in range(len(tensor_errors)):
            print (i, tensor_errors[i][1])
            f.write(str(i))
            for err_val in tensor_errors[i][1]:
                val = roundDecimal(err_val)
                f.write("\t" + str(val))
            f.write("\n")
def test_sensitivity4(Bench):
    """Measure composite per-tensor error against a level-5 baseline.

    First establishes a reference accuracy with every flag at error level 5,
    then for each tensor raises only that tensor through ``error_levels``
    (others stay at 5) and records the error relative to the reference.
    Writes rows to ``<base_dir>/composite_errors.txt``.
    """
    num_flags = Bench.num_flags

    # Baseline: all flags at level 5 define the reference accuracy.
    constructTunerFile(num_flags, 0, 5, 5)
    error = runAndTestError(Bench.tuner_binary, Bench.tuner_accuracy)
    ref_acc = Bench.tuner_accuracy - error
    print ("*** Gold accuracy = ", Bench.tuner_accuracy, " Ref accuracy = ", ref_acc, " *** \n\n")

    tensor_errors = []
    error_levels = [4, 8, 11, 14, 16, 19]

    for tensor_id in range(num_flags):
        errors = []
        for error_level in error_levels:
            # Perturb tensor_id at error_level; all other flags default to 5.
            constructTunerFile(num_flags, tensor_id, error_level, 5)
            error = runAndTestError(Bench.tuner_binary, ref_acc)
            print (tensor_id, error_level, error)
            errors.append(error)
        tensor_errors.append([tensor_id, errors])

    print ("\n\n*** Per-Tensor Avg Errors \n\n")
    f_name = Bench.base_dir + "/composite_errors.txt"
    # 'with' closes the output file even on a write error.
    with open(f_name, "w+") as f:
        for i in range(len(tensor_errors)):
            print (i, tensor_errors[i][1])
            f.write(str(i))
            for err_val in tensor_errors[i][1]:
                val = roundDecimal(err_val)
                f.write("\t" + str(val))
            f.write("\n")
def readTensorErrs(result_dir):
    """Read one error value per tensor from ``<result_dir>/tensor_errors.txt``.

    Each line must contain at least two whitespace-separated tokens; the
    second token is parsed as the float error. Returns the list of errors
    in file order.
    """
    tensor_errs = []
    # 'with' closes the handle even on a parse error (original never closed it).
    with open(result_dir + "/tensor_errors.txt") as f:
        for line in f:
            tensor_errs.append(float(line.split()[1]))
    return tensor_errs
def readTensorErrs2(result_dir):
    """Read multi-level error rows and return one average error per tensor.

    Reads ``<result_dir>/tensor_errors_multiple.txt`` where each row is
    ``<tensor_id> <err_0> <err_1> ... <err_k>``.

    NOTE(review): only ``toks[2:-1]`` is averaged -- besides the id column,
    the first and last error columns are dropped. This looks deliberate
    (excluding the extreme error levels?) but is worth confirming.
    """
    tensor_errs = []
    # 'with' closes the handle on any error (original never closed it).
    with open(result_dir + "/tensor_errors_multiple.txt") as f:
        for line in f:
            toks = line.split()
            total_err = 0.0
            for tok in toks[2:-1]:
                total_err += float(tok)
            # Same as original: rows with fewer than 4 tokens raise
            # ZeroDivisionError here.
            avg_err = total_err / len(toks[2:-1])
            tensor_errs.append(avg_err)
    return tensor_errs
def isSkipLayer(layer):
    """Return True for layers that carry no tunable weights.

    Only "dense" and "conv" layers participate in tuning; everything
    else (pooling, activation, batch-norm, ...) is skipped.
    """
    return not ("dense" in layer or "conv" in layer)
def readLayerCosts(cost_file):
    """Read one float op-cost per line from *cost_file*, in file order."""
    layer_costs = []
    # 'with' closes the handle even on a parse error (original never closed it).
    with open(cost_file) as f:
        for line in f:
            layer_costs.append(float(line.strip()))
    return layer_costs
def select_skip_layers(Bench, percent_to_skip):
    """Pick the top *percent_to_skip* percent of tunable layers to skip.

    Ranks each conv/dense layer by error impact = avg_error / sqrt(cost)/100,
    then returns the highest-impact layer ids joined with "_" (the skip-string
    format consumed by the autotuner command line).
    """
    result_dir = Bench.base_dir
    layer_file = Bench.layer_file

    tensor_errs = readTensorErrs2(result_dir)
    layer_costs = readLayerCosts(Bench.cost_file)
    layer_desc = loadLayerDesc(layer_file)

    # Two counters: 'index' walks per-tensor errors across ALL layers,
    # 'it' counts only tunable (conv/dense) layers.
    # NOTE(review): layer_costs is indexed by 'it', so the cost file is
    # presumably one line per tunable layer only -- confirm against op_cost.txt.
    it = 0
    index = 0
    layer_errs = []
    for layer in layer_desc:
        layer_len = len(layer)
        # First tensor of the layer supplies its average error.
        avg_err = tensor_errs[index]
        index += layer_len
        if isSkipLayer(layer):
            continue
        # Sqrt damps the cost so very expensive layers don't dominate the ranking.
        cost = (math.sqrt(layer_costs[it])) / 100;
        ERR_IMPACT = avg_err / cost
        #print ("layer, ", it, " avg_err = ", avg_err, " cost = ", cost, " err_impact = ", ERR_IMPACT)
        layer_errs.append((ERR_IMPACT, it))
        it += 1

    # Highest error impact first.
    layer_errs.sort(key=operator.itemgetter(0), reverse=True)

    to_skip = len(layer_errs)
    to_skip = math.ceil((percent_to_skip / 100.0) * to_skip)

    # Join the selected layer ids as "a_b_c" (no trailing underscore).
    skip_str = ""
    it = 0
    for err in layer_errs:
        if it >= to_skip:
            break
        skip_str += str(err[1])
        if it < to_skip - 1:
            skip_str += "_"
        it += 1

    return skip_str
if __name__ == "__main__":

    # (label, bench_tuner_data key, percent of tunable layers to skip)
    bench_configs = [
        ("AlexNet", "alexnet_cifar10", 15),
        ("AlexNet2", "alexnet2_cifar10", 15),
        ("VGG16", "vgg16_cifar10", 30),
        ("VGG16_100", "vgg16_cifar100", 15),
        ("ResNet", "resnet18_cifar10", 10),
        ("MobileNet", "mobilenet_cifar10", 15),
        ("MobileNet_SH", "mobilenet_shallow", 25),
    ]

    for label, bench_key, skip_percent in bench_configs:
        bench = bench_tuner_data[bench_key]
        skip_str = select_skip_layers(bench, skip_percent)
        print (label + " skip_str = ", skip_str)
......@@ -2,14 +2,14 @@
import os
import subprocess
from error_sensitivity import select_skip_layers
def runPromiseTunerCmd(Bench, dir_prefix, result_dir, acc_threshold):
def runPromiseTunerCmd(Bench, dir_prefix, result_dir, acc_threshold, autotuner_runs, skip_layers):
tuner_cmd = "python ../opentuner/autotuner/promise_tuner3.py "
tuner_cmd += " --test-limit "
tuner_cmd += str(Bench.autotuner_runs)
tuner_cmd += str(autotuner_runs)
tuner_cmd += " --binary ./"
tuner_cmd += Bench.promise_binary
tuner_cmd += " --num-flags "
......@@ -35,7 +35,8 @@ def runPromiseTunerCmd(Bench, dir_prefix, result_dir, acc_threshold):
#tuner_cmd += str(Bench.skip_layers)
tuner_cmd += " --gpu-layers 0 "
tuner_cmd += " --skip-layers \""
tuner_cmd += str(Bench.skip_layer_str) + "\""
#tuner_cmd += str(Bench.skip_layer_str) + "\""
tuner_cmd += str(skip_layers) + "\""
print (tuner_cmd)
......@@ -49,14 +50,20 @@ def runPromiseTunerCmd(Bench, dir_prefix, result_dir, acc_threshold):
def runPromiseBench(Bench):
# NOTE-IMP: Changing current directory to one with promise binaries
os.chdir("../build_promise/")
result_dir_prefix = "../build_tuner/"
dir_prefix = "../build_tuner/"
#Bench = bench_tuner_data[bench_name]
runPromiseTunerCmd(Bench, result_dir_prefix, Bench.result_dir_3, 2.85)
runPromiseTunerCmd(Bench, result_dir_prefix, Bench.result_dir_2, 1.90)
runPromiseTunerCmd(Bench, result_dir_prefix, Bench.result_dir_1, 0.95)
tuner_runs1 = Bench.autotuner_runs
#tuner_runs2 = Bench.autotuner_runs * 2
#tuner_runs3 = Bench.autotuner_runs * 4
skip_layers1 = "0_" + select_skip_layers(Bench, 10)
skip_layers2 = "0_" + select_skip_layers(Bench, 25)
skip_layers3 = "0_" + select_skip_layers(Bench, 40)
runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_3, 2.55, tuner_runs1, skip_layers3)
runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_2, 1.7, tuner_runs1, skip_layers2)
runPromiseTunerCmd(Bench, dir_prefix, Bench.result_dir_1, 0.85, tuner_runs1, skip_layers1)
"""
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment