Skip to content
GitLab
Explore
Sign in
Primary navigation
Search or go to…
Project
P
predtuner
Manage
Activity
Members
Labels
Plan
Issues
Issue boards
Milestones
Wiki
Code
Merge requests
Repository
Branches
Commits
Tags
Repository graph
Compare revisions
Snippets
Build
Pipelines
Jobs
Pipeline schedules
Artifacts
Deploy
Releases
Package registry
Model registry
Operate
Environments
Terraform modules
Monitor
Incidents
Analyze
Value stream analytics
Contributor analytics
CI/CD analytics
Repository analytics
Model experiments
Help
Help
Support
GitLab documentation
Compare GitLab plans
Community forum
Contribute to GitLab
Provide feedback
Keyboard shortcuts
?
Snippets
Groups
Projects
Show more breadcrumbs
llvm
predtuner
Commits
f602d4b5
Commit
f602d4b5
authored
4 years ago
by
Yifan Zhao
Browse files
Options
Downloads
Patches
Plain Diff
Added logging utils
parent
7a84b294
No related branches found
No related tags found
No related merge requests found
Changes
4
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
predtuner/__init__.py
+1
-0
1 addition, 0 deletions
predtuner/__init__.py
predtuner/_logging.py
+93
-0
93 additions, 0 deletions
predtuner/_logging.py
predtuner/approxapp.py
+6
-1
6 additions, 1 deletion
predtuner/approxapp.py
test/test_torchapp.py
+9
-8
9 additions, 8 deletions
test/test_torchapp.py
with
109 additions
and
9 deletions
predtuner/__init__.py
+
1
−
0
View file @
f602d4b5
from
._logging
import
config_pylogger
from
.approxapp
import
ApproxApp
,
ApproxKnob
,
ApproxTuner
from
.approxes
import
get_knobs_from_file
from
.modeledapp
import
(
IPerfModel
,
IQoSModel
,
LinearPerfModel
,
ModeledApp
,
...
...
This diff is collapsed.
Click to expand it.
predtuner/_logging.py
0 → 100644
+
93
−
0
View file @
f602d4b5
import
logging
import
os
from
logging
import
config
from
pathlib
import
Path
from
typing
import
Union
import
tqdm
# Anything accepted where a filesystem path is expected: a pathlib.Path
# or a plain string path.
PathLike = Union[Path, str]
class TqdmStreamHandler(logging.Handler):
    """tqdm-friendly logging handler.

    Routes log records through ``tqdm.tqdm.write`` instead of plain stream
    writes, so emitted log lines do not corrupt an active tqdm progress bar.
    """

    def __init__(self, level=logging.NOTSET):
        """Create the handler; by default it handles records of every level."""
        super().__init__(level)

    def emit(self, record):
        """Format ``record``, print it via ``tqdm.tqdm.write``, then flush.

        Mirrors ``logging.StreamHandler.emit``: interrupt-style exceptions
        propagate, while any other error is routed to ``handleError`` so a
        logging failure never crashes the application.
        """
        try:
            msg = self.format(record)
            tqdm.tqdm.write(msg)
            self.flush()
        except (KeyboardInterrupt, SystemExit, RecursionError):
            raise
        # Narrowed from a bare `except:`, which would also swallow other
        # BaseExceptions (e.g. GeneratorExit); matches logging.StreamHandler.
        except Exception:
            self.handleError(record)
# The dict most recently passed to logging.config.dictConfig by
# config_pylogger; None until config_pylogger has been called.
# Kept so override_opentuner_config() can re-apply it later.
_last_applied_config: Union[dict, None] = None
def config_pylogger(
    filename: str = None, output_dir: PathLike = None, verbose: bool = False
) -> logging.Logger:
    """Configure the Python root logger and return it.

    For each execution of the application, we'd like to create a unique log file.
    By default this file is named using the date and time of day, so that it can
    be sorted by recency. You can also name your filename or choose the log
    directory.

    :param filename: name of the log file; defaults to a timestamp.
    :param output_dir: directory to place the log file in; defaults to the
        current working directory, and is created if missing.
    :param verbose: if True the root logger level is DEBUG, otherwise INFO
        (the console handler is capped at INFO either way; the file handler
        receives everything the root logger passes on).
    """
    import time

    timestr = time.strftime("%Y.%m.%d-%H%M%S")
    filename = filename or timestr
    output_dir = Path(output_dir or ".")
    # mkdir(parents=True, exist_ok=True) replaces the racy
    # os.path.exists() + os.makedirs() pair (TOCTOU-safe, and also
    # creates intermediate directories).
    output_dir.mkdir(parents=True, exist_ok=True)
    file_path = output_dir / filename
    global _last_applied_config
    # Remember the config so override_opentuner_config() can re-apply it
    # after opentuner installs its own logging configuration.
    _last_applied_config = d = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "simple": {"format": "%(levelname)s %(name)s: %(message)s"},
            "detailed": {
                "format": "[%(asctime)-15s] %(levelname)7s %(name)s: "
                "%(message)s @%(filename)s:%(lineno)d"
            },
        },
        "handlers": {
            # "()" tells dictConfig to call this factory for the handler.
            "console": {
                "()": TqdmStreamHandler,
                "level": "INFO",
                "formatter": "simple",
            },
            "file": {
                "class": "logging.FileHandler",
                "filename": file_path.as_posix(),
                # Because we may apply this config again, want to keep existing content
                "mode": "a",
                "formatter": "detailed",
            },
        },
        "root": {
            "level": "DEBUG" if verbose else "INFO",
            "handlers": ["console", "file"],
        },
    }
    config.dictConfig(d)
    msglogger = logging.getLogger()
    msglogger.info(f"Log file for this run: {file_path}")
    return msglogger
def override_opentuner_config():
    """Re-apply our last logging configuration and drop opentuner's log file.

    Opentuner installs its own logging setup; calling this afterwards restores
    the configuration previously applied by ``config_pylogger`` (if any) and
    deletes the stray ``opentuner.log`` file it may have created.
    """
    if _last_applied_config is not None:
        config.dictConfig(_last_applied_config)
    stray_log = Path("opentuner.log")
    if stray_log.is_file():
        stray_log.unlink()
This diff is collapsed.
Click to expand it.
predtuner/approxapp.py
+
6
−
1
View file @
f602d4b5
...
...
@@ -8,6 +8,8 @@ from opentuner.measurement.interface import MeasurementInterface
from
opentuner.search.manipulator
import
(
ConfigurationManipulator
,
EnumParameter
)
from
._logging
import
override_opentuner_config
msg_logger
=
logging
.
getLogger
(
__name__
)
KnobsT
=
Dict
[
str
,
str
]
PathLike
=
Union
[
Path
,
str
]
...
...
@@ -90,8 +92,11 @@ class ApproxTuner:
tuner
=
TunerInterface
(
opentuner_args
,
self
.
app
,
qos_tuner_threshold
,
qos_keep_threshold
,
max_iter
,
)
trm
=
TuningRunMain
(
tuner
,
opentuner_args
)
# TuningRunMain.__init__ initializes its own logger, so we'll override it and use ours
override_opentuner_config
()
# This is where opentuner runs
TuningRunMain
(
tuner
,
opentuner_args
)
.
main
()
trm
.
main
()
def
get_all_configs
(
self
)
->
List
[
Config
]:
from
._dbloader
import
read_opentuner_db
...
...
This diff is collapsed.
Click to expand it.
test/test_torchapp.py
+
9
−
8
View file @
f602d4b5
import
unittest
import
torch
from
torch.utils.data.dataset
import
Subset
from
predtuner.approxes
import
get_knobs_from_file
from
predtuner.torchapp
import
TorchApp
from
predtuner
.torchutil
import
accuracy
import
torch
from
model_zoo
import
CIFAR
,
VGG16Cifar10
from
predtuner
import
TorchApp
,
accuracy
,
config_pylogger
,
get_knobs_from_file
from
torch.nn
import
Conv2d
,
Linear
from
torch.utils.data.dataloader
import
DataLoader
from
model_zoo
import
VGG16Cifar10
,
CIFAR
from
torch.utils.data.dataset
import
Subset
msg_logger
=
config_pylogger
(
output_dir
=
"
/tmp
"
,
verbose
=
True
)
class
TestTorchApp
(
unittest
.
TestCase
):
def
setUp
(
self
):
dataset
=
CIFAR
.
from_file
(
"
model_data/cifar10/input.bin
"
,
"
model_data/cifar10/labels.bin
"
)
dataset
=
CIFAR
.
from_file
(
"
model_data/cifar10/input.bin
"
,
"
model_data/cifar10/labels.bin
"
)
self
.
dataset
=
Subset
(
dataset
,
range
(
100
))
self
.
module
=
VGG16Cifar10
()
self
.
module
.
load_state_dict
(
torch
.
load
(
"
model_data/vgg16_cifar10.pth.tar
"
))
...
...
This diff is collapsed.
Click to expand it.
Preview
0%
Loading
Try again
or
attach a new file
.
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Save comment
Cancel
Please
register
or
sign in
to comment