Project: llvm / hpvm-release

Commit 4be13187, authored 6 years ago by Hashim Sharif
Parent: fc9f00ba

    Dumping rankings with validation

Showing 2 changed files, with 80 additions and 36 deletions:

  llvm/projects/hpvm-tensor-rt/bin/run_autotuner.py   (+70, -32)
  llvm/projects/hpvm-tensor-rt/bin/swing_selection.py (+10, -4)
llvm/projects/hpvm-tensor-rt/bin/run_autotuner.py (+70, -32)

 import os
 import sys
 import subprocess
 import shutil
 from swing_selection import compute_swing_selection
@@ -54,8 +55,8 @@ Alexnet2.num_flags = 23
 Alexnet2.num_layers = 7
 Alexnet2.error_range_1 = 9
 Alexnet2.error_range_2 = 11
-Alexnet2.result_dir_1 = "tuner_results/alexnet2_cifar10/loss_1/batch1"
-Alexnet2.result_dir_2 = "tuner_results/alexnet2_cifar10/loss_2/batch1"
+Alexnet2.result_dir_1 = "tuner_results/alexnet2_cifar10/loss_1/batch2"
+Alexnet2.result_dir_2 = "tuner_results/alexnet2_cifar10/loss_2/batch2"
 Alexnet2.tensor_desc_file = "tuner_results/alexnet2_cifar10/alexnet2_tensors.txt"
 Alexnet2.layer_file = "tuner_results/alexnet2_cifar10/alexnet2_layers.txt"
 Alexnet2.autotuner_runs = 800
@@ -69,17 +70,18 @@ bench_tuner_data["alexnet2_cifar10"] = Alexnet2
 Alexnet3 = Benchmark()
 Alexnet3.tuner_binary = "vgg16_cifar10_tuner"
-Alexnet3.promise_binary = "vgg16_promise_quant"
+Alexnet3.promise_binary = "./vgg16_cifar10_promise_quant"
 Alexnet3.validation_binary = "vgg16_cifar10_valid"
 Alexnet3.num_flags = 50
 Alexnet3.num_layers = 15
 Alexnet3.error_range_1 = 9
 Alexnet3.error_range_2 = 11
 Alexnet3.start_promise_range = 4
 Alexnet3.result_dir_1 = "tuner_results/vgg16_cifar10/loss_1/batch1"
 Alexnet3.result_dir_2 = "tuner_results/vgg16_cifar10/loss_2/batch1"
 Alexnet3.tensor_desc_file = "tuner_results/vgg16_cifar10/vgg16_tensors.txt"
 Alexnet3.layer_file = "tuner_results/vgg16_cifar10/vgg16_layers.txt"
-Alexnet3.autotuner_runs = 800
+Alexnet3.autotuner_runs = 900
 Alexnet3.tuner_accuracy = 89.75
 Alexnet3.promise_accuracy = 89.75
 Alexnet3.validation_accuracy = 89.41
@@ -100,7 +102,7 @@ Alexnet4.result_dir_1 = "tuner_results/resnet18_cifar10/loss_1/batch1"
 Alexnet4.result_dir_2 = "tuner_results/resnet18_cifar10/loss_2/batch1"
 Alexnet4.tensor_desc_file = "tuner_results/resnet18_cifar10/resnet_tensors.txt"
 Alexnet4.layer_file = "tuner_results/resnet18_cifar10/resnet_layers.txt"
-Alexnet4.autotuner_runs = 800
+Alexnet4.autotuner_runs = 1500
 Alexnet4.tuner_accuracy = 89.54
 Alexnet4.promise_accuracy = 89.54
 Alexnet4.validation_accuracy = 89.44
@@ -174,6 +176,8 @@ def runPromiseBench(bench_name):
   tuner_cmd += Bench.promise_binary
   tuner_cmd += " --num-flags "
   tuner_cmd += str(Bench.num_layers)
+  tuner_cmd += " --start-range "
+  tuner_cmd += str(Bench.start_promise_range)
   tuner_cmd += " --error-range "
   tuner_cmd += str(10)
   tuner_cmd += " --result-dir "

@@ -198,6 +202,8 @@ def runPromiseBench(bench_name):
   tuner_cmd += Bench.promise_binary
   tuner_cmd += " --num-flags "
   tuner_cmd += str(Bench.num_layers)
+  tuner_cmd += " --start-range "
+  tuner_cmd += str(Bench.start_promise_range)
   tuner_cmd += " --error-range "
   tuner_cmd += str(10)
   tuner_cmd += " --result-dir "
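For illustration, a minimal standalone sketch (not part of this commit) of the command fragment these two hunks build, using the Alexnet3 values from the diff above; the rest of the real command line (tuner driver invocation, result directory, etc.) is omitted here.

class Bench:
    # Values copied from the Alexnet3 entry above; the class itself is only a stand-in.
    promise_binary = "./vgg16_cifar10_promise_quant"
    num_layers = 15
    start_promise_range = 4

tuner_cmd = ""
tuner_cmd += Bench.promise_binary
tuner_cmd += " --num-flags "
tuner_cmd += str(Bench.num_layers)
tuner_cmd += " --start-range "               # flag added by this commit
tuner_cmd += str(Bench.start_promise_range)  # value added by this commit
tuner_cmd += " --error-range "
tuner_cmd += str(10)

print(tuner_cmd)
# ./vgg16_cifar10_promise_quant --num-flags 15 --start-range 4 --error-range 10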
@@ -216,28 +222,28 @@ def runPromiseBench(bench_name):
p
.
wait
()
def
runPromiseTuner
():
runPromiseBench
(
"
alexnet2_cifar10
"
)
runPromiseBench
(
"
alexnet_cifar10
"
)
def
computeBenchSwings
(
bench_name
):
loss_confs
=
[]
conf_ranks
=
[]
# Swing selection for 1% and 2% results
Bench
=
bench_tuner_data
[
bench_name
]
tuned_result_dir
=
Bench
.
result_dir_1
+
"
/high_confidence/
"
layer_file
=
Bench
.
layer_file
layer_swings
=
compute_swing_selection
(
tuned_result_dir
,
layer_file
)
layer_swings
,
file_names
=
compute_swing_selection
(
tuned_result_dir
,
layer_file
)
loss_confs
.
append
(
layer_swings
)
conf_ranks
.
append
(
file_names
)
print
(
file_names
)
tuned_result_dir
=
Bench
.
result_dir_2
+
"
/high_confidence/
"
layer_swings
=
compute_swing_selection
(
tuned_result_dir
,
layer_file
)
layer_swings
,
file_names
=
compute_swing_selection
(
tuned_result_dir
,
layer_file
)
loss_confs
.
append
(
layer_swings
)
conf_ranks
.
append
(
file_names
)
print
(
file_names
)
return
loss_confs
return
loss_confs
,
conf_ranks
...
...
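A minimal sketch (made-up values, not from the commit) of the shape computeBenchSwings now returns: loss_confs and conf_ranks are parallel lists, so the configuration at index i of a loss level pairs with the rank string at the same index.

loss_confs = [["conf_a", "conf_b"], ["conf_c"]]   # hypothetical configurations for loss 1% and loss 2%
conf_ranks = [["rank_a", "rank_b"], ["rank_c"]]   # hypothetical rank strings, same shape

for confs, ranks in zip(loss_confs, conf_ranks):
    for layer_swings, rank_str in zip(confs, ranks):
        print(rank_str, "->", layer_swings)       # each configuration carries its rank string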
@@ -252,7 +258,8 @@ def getLayerString(layer_swings):
   return layer_string


-def testValidationRun(Bench, validation_dir, layer_swings, threshold):
+def testValidationRun(Bench, validation_dir, layer_swings, threshold, rank_str):

   os.chdir("../build_promise/")
@@ -290,7 +297,7 @@ def testValidationRun(Bench, validation_dir, layer_swings, threshold):
   avg_acc = sum_acc / index
   out_fname = validation_dir + validation_binary + "_" + str(avg_acc)
-  shutil.copy("run_accuracies.txt", out_fname)
+  shutil.copy("run_accuracies.txt", out_fname + "_" + rank_str)

   layer_string = getLayerString(layer_swings)
   f = open(out_fname, "w")
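A worked example (all values invented) of the file names testValidationRun composes after this change: the averaged-accuracy file keeps its old name, and the extra run_accuracies.txt copy gets the rank string appended.

validation_dir = "../build_tuner/tuner_results/vgg16_cifar10/loss_1/batch1/validation_runs/"
validation_binary = "vgg16_cifar10_valid"
avg_acc = 88.7
rank_str = "promise_confs_loss1_23_out"   # assumed shape of a rank string; see the swing_selection.py hunks below

out_fname = validation_dir + validation_binary + "_" + str(avg_acc)
print(out_fname)                    # ...validation_runs/vgg16_cifar10_valid_88.7
print(out_fname + "_" + rank_str)   # ...vgg16_cifar10_valid_88.7_promise_confs_loss1_23_out  (copy destination)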
@@ -381,17 +388,30 @@ def dumpValidatedConfigs(configs, result_dir, layer_desc_file,
     f.write("\n")

   f.close()


+def dumpRankings(validated_ranks, result_dir, rank_file):
+
+  os.chdir("../build_tuner/")
+  f = open(result_dir + "/" + rank_file, "w+")
+  for rank in validated_ranks:
+    f.write(rank + "\n")
+  f.close()
+
+
 def runBenchValidation(bench_name):

   Bench = bench_tuner_data[bench_name]
-  loss_confs = computeBenchSwings(bench_name)
+  loss_confs, conf_ranks = computeBenchSwings(bench_name)
   loss1_confs = loss_confs[0]
   loss2_confs = loss_confs[1]
+  conf_ranks1 = conf_ranks[0]
+  conf_ranks2 = conf_ranks[1]

   validation_dir_1 = "../build_tuner/" + Bench.result_dir_1 + "/validation_runs/"
   if not os.path.exists(validation_dir_1):
@@ -401,33 +421,41 @@ def runBenchValidation(bench_name):
   if not os.path.exists(validation_dir_2):
     os.mkdir(validation_dir_2)

   ind = 0
   validated_confs1 = []
+  validated_ranks1 = []
   failed_confs1 = []
   confidences1 = []
   for layer_swings in loss1_confs:
-    confidence = testValidationRun(Bench, validation_dir_1, layer_swings, 1.0)
+    confidence = testValidationRun(Bench, validation_dir_1, layer_swings, 1.0, conf_ranks1[ind])
     if confidence > 95:
       validated_confs1.append(layer_swings)
       confidences1.append(confidence)
+      validated_ranks1.append(conf_ranks1[ind])
     else:
       failed_confs1.append(layer_swings)
     ind += 1

   ind = 0
   validated_confs2 = []
+  validated_ranks2 = []
   failed_confs2 = []
   confidences2 = []
   for layer_swings in loss2_confs:
-    confidence = testValidationRun(Bench, validation_dir_2, layer_swings, 2.0)
+    confidence = testValidationRun(Bench, validation_dir_2, layer_swings, 2.0, conf_ranks2[ind])
     if confidence > 95:
       validated_confs2.append(layer_swings)
       confidences2.append(confidence)
+      validated_ranks2.append(conf_ranks2[ind])
     else:
       failed_confs2.append(layer_swings)
     ind += 1

-  dumpValidatedConfigs(validated_confs1, Bench.result_dir_1, Bench.layer_file, "validated_confs.txt")
+  dumpValidatedConfigs(validated_confs1, Bench.result_dir_1,
+                       Bench.layer_file, "validated_confs.txt")
   dumpValidatedConfigs(validated_confs2, Bench.result_dir_2, Bench.layer_file, "validated_confs.txt")
@@ -436,6 +464,8 @@ def runBenchValidation(bench_name):
   dumpValidatedConfigs(failed_confs2, Bench.result_dir_2, Bench.layer_file, "failed_confs.txt")

+  dumpRankings(validated_ranks1, Bench.result_dir_1, "validated_ranks.txt")
+  dumpRankings(validated_ranks2, Bench.result_dir_2, "validated_ranks.txt")

   dumpConfigConfidence(validated_confs1, confidences1, Bench.result_dir_1, Bench.layer_file)
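For orientation, a sketch (not part of the commit) of the files runBenchValidation now leaves in each result directory; the directory below is the loss-1 path used for vgg16 above, and dumpConfigConfidence writes an additional confidence file not shown in this hunk.

result_dir = "tuner_results/vgg16_cifar10/loss_1/batch1"
outputs = [
    result_dir + "/validated_confs.txt",   # configurations whose validation confidence exceeded 95
    result_dir + "/failed_confs.txt",      # configurations that fell short
    result_dir + "/validated_ranks.txt",   # new in this commit: rank strings of the validated configurations
]
for path in outputs:
    print(path)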
@@ -451,9 +481,9 @@ def runBenchValidation(bench_name):
 def runTunerValidation():

-  #runBenchValidation("vgg16_cifar10")
-  #runBenchValidation("alexnet_cifar10")
-  runBenchValidation("alexnet2_cifar10")
+  runBenchValidation("alexnet_cifar10")
+  runBenchValidation("vgg16_cifar10")
+  #runBenchValidation("alexnet2_cifar10")


 def computeLayerSwings():
@@ -461,6 +491,14 @@ def computeLayerSwings():
   computeBenchSwings("alexnet_cifar10")
   computeBenchSwings("alexnet2_cifar10")


+def runPromiseTuner():
+
+  runPromiseBench("vgg16_cifar10")
+  #runPromiseBench("alexnet2_cifar10")
+  runPromiseBench("alexnet_cifar10")
+
+
 def runPromiseValidation():
   return
@@ -468,17 +506,17 @@ def runPromiseValidation():
 def runAutotuner():

-  runTunerBench("resnet18_cifar10")
+  #runTunerBench("resnet18_cifar10")
   #runTunerBench("vgg16_cifar10")
   #runTunerBench("alexnet_cifar10")
-  #runTunerBench("alexnet2_cifar10")
+  runTunerBench("alexnet2_cifar10")


 if __name__ == "__main__":

-  runAutotuner()
-  #runTunerValidation()
+  #runAutotuner()
+  runTunerValidation()

   #computeLayerSwings()
llvm/projects/hpvm-tensor-rt/bin/swing_selection.py (+10, -4)
@@ -174,7 +174,7 @@ def getLayerSwings(layer_desc, configurations):
       print (config_vals[index], config_vals[index + 1])
       promise_swing = max(config_vals[index], config_vals[index + 1])
     stride = len(layer_desc[layer_index])
-    print ("*stride = ", stride)
+    #print ("*stride = ", stride)
     index += stride
     swing_vals.append(promise_swing)
@@ -204,6 +204,7 @@ def dumpLayerTargets(targets, tuned_result_dir, layer_desc_file):
   layer_desc = loadLayerDesc(layer_desc_file)
   print (layer_desc)

+  file_names = []
   configurations = []
   for e, file_configs in targets.items():
     for name, config in file_configs.items():
@@ -211,8 +212,13 @@ def dumpLayerTargets(targets, tuned_result_dir, layer_desc_file):
       for c in config:
         config_vals.append(c)

       print (config_vals)
       configurations.append(config_vals)
+      rank = e + "_" + "_".join(name.split("_")[-2:])
+      file_names.append(rank)
+
+  # NOTE: get PROMISE swing values corresponding to each layer
   layer_swings = getLayerSwings(layer_desc, configurations)
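A worked example (inputs invented) of the rank string assembled above from the targets key e and the per-file configuration name:

e = "promise_confs_loss1"            # hypothetical key from targets.items()
name = "vgg16_cifar10_conf_23_out"   # hypothetical configuration file name

rank = e + "_" + "_".join(name.split("_")[-2:])
print(rank)   # promise_confs_loss1_23_out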
@@ -245,7 +251,7 @@ def dumpLayerTargets(targets, tuned_result_dir, layer_desc_file):
   f.close()

   print (layer_swings)
-  return layer_swings
+  return layer_swings, file_names
@@ -264,11 +270,11 @@ def computeLayerTargets(tuned_result_dir, layer_desc_file):
   dumpTargets(targets_file_path, targets)

-  layer_swings = dumpLayerTargets(targets, tuned_result_dir, layer_desc_file)
+  layer_swings, file_names = dumpLayerTargets(targets, tuned_result_dir, layer_desc_file)

   replaceFirstLayer(layer_swings)

-  return layer_swings
+  return layer_swings, file_names


 # Externally-called function