Commit ac6d7146, authored 5 years ago by Elizabeth
Started adding pydoc
parent f24a8d81
Showing 1 changed file: llvm/projects/hpvm-tensor-rt/build_pldi/table_generator.py, with 99 additions and 36 deletions.
@@ -6,59 +6,102 @@ import shutil
 from collections import defaultdict
 
 class TableGenerator:
     '''
     Stores all precision conversions used.
     '''
     precision_conversions = frozenset(["h2f", "f2h"])
 
-    def __init__(self, dir_name, iters, profiler_binary_name):
-        self.__dir_name = dir_name
+    def __init__(self, dir_path, iters, profiler_binary_name):
+        '''
+        Args:
+            dir_path: Path of directory containing network binaries
+            iters: Number of iterations to run each binary for
+            profiler_binary_name: Name of offline profiler binary to run
+        '''
+        self.__dir_path = dir_path
 
-        # directory/path/network_name (last item in path)
-        self.__network_name = os.path.split(dir_name)[-1]
+        # Name of the actual directory
+        self.__network_name = os.path.split(dir_path)[-1]
 
         self.__iters = iters
         self.__profiler_binary_name = profiler_binary_name
-        self.__results_dir_name = "%s_results" % self.__dir_name
+
+        # Path to results directory
+        self.__results_dir_path = "%s_results" % self.__dir_path
+
+        # Outputted table file
+        self.__table_filename = "%s_tensors.txt" % self.__network_name
 
-    def __is_binary(self, file_path):
-        # Binary name must start with the network name as per our naming standards
+        # Nested default dictionary of default dicts
+        self.__table = self.__build_nested_default_dict()
+
+    def generate_table(self):
+        '''
+        Generates a table file called <network_name>_tensors.txt in the
+        '''
+        self.__build_internal_table()
+        self.__output_table()
+
+    def __should_execute_file(self, file_path):
+        '''
+        Checks if the file at the given file path is a binary that should be run
+        by the profiler. Must exist, be a binary, and must start with the network
+        name as per our naming standards.
+
+        Args:
+            file_path: Path of the file to check
+        '''
         return os.path.isfile(file_path) and os.access(file_path, os.X_OK) and \
                 file_path.find(self.__network_name) != -1
 
-    def run_binaries_in_input_dir(self):
-        if not os.path.isdir(self.__dir_name):
-            print("ERROR: Directory %s not found" % self.__dir_name)
+    def run_inputted_binaries(self):
+        '''
+        Invokes the profiler to run all appropriate binaries (must start with the network
+        name) in the inputted directory. Result files generated by the profiler are
+        stored in the results file directory and are named <binary_name>.txt. These results
+        files are then parsed in a later step to generate the table
+        '''
+        if not os.path.isdir(self.__dir_path):
+            print("ERROR: Directory %s not found" % self.__dir_path)
             exit(1)
 
         try:
-            os.mkdir(self.__results_dir_name)
+            os.mkdir(self.__results_dir_path)
         except OSError:
-            if os.path.isdir(self.__results_dir_name):
+            if os.path.isdir(self.__results_dir_path):
                 print("Directory already exists. Clearing directory.")
-                for old_file in glob.glob(os.path.join(self.__results_dir_name, "*")):
+                for old_file in glob.glob(os.path.join(self.__results_dir_path, "*")):
                     os.remove(old_file)
             else:
                 print("ERROR: Directory doesn't exist but failed to create dir")
 
-        for binary_name in os.listdir(self.__dir_name):
-            binary_path = os.path.join(self.__dir_name, binary_name)
+        for binary_name in os.listdir(self.__dir_path):
+            binary_path = os.path.join(self.__dir_path, binary_name)
 
-            if not self.__is_binary(binary_path):
+            if not self.__should_execute_file(binary_path):
                 continue
 
             if not os.path.isfile(binary_path):
                 print("ERROR: Binary %s not found" % binary_path)
                 exit(1)
 
-            output_file = os.path.join(self.__results_dir_name, binary_name + ".txt")
+            output_file = os.path.join(self.__results_dir_path, binary_name + ".txt")
             # No stdout/stderr piping needed for now
             subprocess.Popen([profiler_binary_name, binary_path, str(self.__iters), \
                         output_file]).communicate()
 
     def __get_approximation_type(self, results_filename):
         '''
         Parses a given results filename for the approximation type.
         Format assumption: <network_name>_<approx_type>.txt
         Args:
             results_filename: Name of results file
         Returns:
             the approximation technique (ex: fp16)
         '''
         approx_type_start_ind = results_filename.find(self.__network_name) \
                 + len(self.__network_name) + 1  # + 1 to account for _ delimiter
         approx_type_end_ind = results_filename.find(".txt")
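The hunk above cuts off just before the slice that actually extracts the approximation type, but the docstring's format assumption (<network_name>_<approx_type>.txt) makes the logic easy to follow. Below is a minimal standalone sketch of that parsing, using a hypothetical helper name and an illustrative filename rather than the committed method itself:

    # Hypothetical sketch of the parsing described in __get_approximation_type.
    # Assumes results files follow the <network_name>_<approx_type>.txt convention.
    def get_approximation_type(results_filename, network_name):
        # Index just past "<network_name>_"; the + 1 skips the underscore delimiter
        approx_type_start_ind = results_filename.find(network_name) + len(network_name) + 1
        # The approximation type runs up to the ".txt" extension
        approx_type_end_ind = results_filename.find(".txt")
        return results_filename[approx_type_start_ind:approx_type_end_ind]

    print(get_approximation_type("mobilenet_fp16.txt", "mobilenet"))  # prints "fp16"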
@@ -66,34 +109,54 @@ class TableGenerator:
     def __parse_tensor_operation_line(self, tensor_op_line):
-        print(tensor_op_line)
+        '''
+        Parses a tensor operation line (within a output file from the offline
+        profiler for the operation name, the total time used, and the total
+        energy used
+
+        Args:
+            tensor_op_line: Tensor operation line from output file
+
+        Returns:
+            operation name
+            total time used
+            total energy used
+        '''
         line_as_list = tensor_op_line.split(",")
         return line_as_list[0], line_as_list[1], line_as_list[2]
 
     def __build_nested_default_dict(self):
+        '''
+        Builds a nested default dictionary with an arbitrary number of levels
+        '''
         return defaultdict(self.__build_nested_default_dict)
 
-    # h2f or f2h
     def __get_original_operation_name(self, op_name):
+        '''
+        Parses an operation name containing _<conversion type> for the original
+        operation name.
+        Format assumption: <original_op_name>_<conversion type>
+        Args:
+            op_name: Name of the operation
+        Returns:
+            the original operation name
+        '''
         underscore_ind = op_name.find("_")
         return op_name[:underscore_ind], op_name[underscore_ind + 1:]
 
-    def generate_table(self):
-        self.__table = self.__build_nested_default_dict()
-        self.__build_internal_table()
-        self.__output_table()
-
     def __build_internal_table(self):
-        for results_file_name in os.listdir(self.__results_dir_name):
+        for results_file_name in os.listdir(self.__results_dir_path):
             # Ignore if it's not a results file
             if results_file_name == self.__table_filename or \
                         not results_file_name.startswith(self.__network_name):
                 continue
 
             approx_type = self.__get_approximation_type(results_file_name)
-            results_file = open(os.path.join(self.__results_dir_name, results_file_name), "r")
+            results_file = open(os.path.join(self.__results_dir_path, results_file_name), "r")
 
             for line in results_file:
                 line = line.strip()
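Two helpers documented in this hunk are compact enough to misread, so here is a short hedged sketch of both; the sample operation name and table keys are illustrative assumptions, not values from the commit. __build_nested_default_dict returns a defaultdict whose default factory is the builder itself, so indexing a missing key creates another nested level on demand, and __get_original_operation_name splits an operation name at its first underscore into the original name and the conversion suffix:

    from collections import defaultdict

    # Recursive defaultdict: any missing key materializes a new nested dict,
    # so deep assignments work without pre-building the intermediate levels.
    def build_nested_default_dict():
        return defaultdict(build_nested_default_dict)

    table = build_nested_default_dict()
    table["fp16"]["tensorConv1"]["time"] = 1.23  # intermediate dicts auto-created

    # Split "<original_op_name>_<conversion type>" at the first underscore,
    # e.g. "tensorConv1_h2f" -> ("tensorConv1", "h2f").
    def get_original_operation_name(op_name):
        underscore_ind = op_name.find("_")
        return op_name[:underscore_ind], op_name[underscore_ind + 1:]

    print(get_original_operation_name("tensorConv1_h2f"))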
@@ -122,7 +185,7 @@ class TableGenerator:
             results_file.close()
 
     def __output_table(self):
-        table_file_path = os.path.join(self.__results_dir_name, self.__table_filename)
+        table_file_path = os.path.join(self.__results_dir_path, self.__table_filename)
         # TODO un hard code this
         soc_operations_file_name = os.path.join("/home/nvidia/soc_simulator", "%s_cifar10" % self.__network_name, "%s_ops.txt" % self.__network_name)
@@ -184,9 +247,9 @@ class TableGenerator:
         # Try doing this per layer first
         pass
 
-binary_dir_name = "/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/build_pldi/mobilenet"
+binary_dir_path = "/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/build_pldi/mobilenet"
 num_iters = 1
 profiler_binary_name = "/home/nvidia/awesome_profiler/pp"
-table_gen = TableGenerator(binary_dir_name, num_iters, profiler_binary_name)
-#table_gen.run_binaries_in_input_dir()
-table_gen.generate_table()
+table_gen = TableGenerator(binary_dir_path, num_iters, profiler_binary_name)
+#table_gen.run_inputted_binaries()
+# table_gen.generate_table()
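The driver at the bottom of the file now leaves both calls commented out. As a usage note, here is a hedged sketch of how the class would be exercised end to end, assuming the same hard-coded binaries directory and profiler path shown above (placeholders, not a tested configuration):

    # Sketch of end-to-end usage under the file's naming conventions:
    # binaries inside binary_dir_path start with the network name ("mobilenet..."),
    # the profiler writes one <binary_name>.txt per binary into mobilenet_results,
    # and generate_table() then emits mobilenet_tensors.txt in that same directory.
    binary_dir_path = "/home/nvidia/Gitlab/hpvm/llvm/projects/hpvm-tensor-rt/build_pldi/mobilenet"
    num_iters = 1
    profiler_binary_name = "/home/nvidia/awesome_profiler/pp"

    table_gen = TableGenerator(binary_dir_path, num_iters, profiler_binary_name)
    table_gen.run_inputted_binaries()  # profile every matching binary
    table_gen.generate_table()         # parse the result files into the table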