Skip to content
Snippets Groups Projects
Commit f602d4b5 authored by Yifan Zhao's avatar Yifan Zhao
Browse files

Added logging utils

parent 7a84b294
No related branches found
No related tags found
No related merge requests found
from ._logging import config_pylogger
from .approxapp import ApproxApp, ApproxKnob, ApproxTuner
from .approxes import get_knobs_from_file
from .modeledapp import (IPerfModel, IQoSModel, LinearPerfModel, ModeledApp,
......
import logging
import os
from logging import config
from pathlib import Path
from typing import Union
import tqdm
PathLike = Union[Path, str]
class TqdmStreamHandler(logging.Handler):
    """tqdm-friendly logging handler.

    Uses `tqdm.tqdm.write` instead of `print` so that emitted log lines do
    not corrupt any tqdm progress bar currently being rendered.
    """

    def __init__(self, level=logging.NOTSET):
        super().__init__(level)

    def emit(self, record):
        try:
            msg = self.format(record)
            tqdm.tqdm.write(msg)
            self.flush()
        except (KeyboardInterrupt, SystemExit, RecursionError):
            # Never swallow interpreter-control exceptions.
            raise
        # Was a bare `except:`, which also caught BaseExceptions such as
        # GeneratorExit; match the stdlib Handler.emit idiom instead and
        # delegate genuine failures to logging's error handler.
        except Exception:
            self.handleError(record)
# noinspection PyTypeChecker
_last_applied_config: dict = None
def config_pylogger(
    filename: Union[str, None] = None,
    output_dir: Union[PathLike, None] = None,
    verbose: bool = False,
) -> logging.Logger:
    """Configure the Python root logger and return it.

    For each execution of the application, we'd like to create a unique log file.
    By default this file is named using the date and time of day, so that it can
    be sorted by recency. You can also name your filename or choose the log
    directory.

    :param filename: Log file name; defaults to a ``%Y.%m.%d-%H%M%S`` timestamp
        so each run gets a unique, recency-sortable file.
    :param output_dir: Directory for the log file (created if missing);
        defaults to the current directory.
    :param verbose: If True the root logger level is DEBUG, otherwise INFO.
    :return: The configured root logger.
    """
    import time

    timestr = time.strftime("%Y.%m.%d-%H%M%S")
    filename = filename or timestr
    output_dir = Path(output_dir or ".")
    # Race-free replacement for `if not os.path.exists(...): os.makedirs(...)`.
    output_dir.mkdir(parents=True, exist_ok=True)
    file_path = output_dir / filename
    global _last_applied_config
    # Remember the config so override_opentuner_config() can re-apply it after
    # opentuner installs its own logging configuration.
    _last_applied_config = d = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "simple": {"format": "%(levelname)s %(name)s: %(message)s"},
            "detailed": {
                "format": "[%(asctime)-15s] "
                "%(levelname)7s %(name)s: "
                "%(message)s "
                "@%(filename)s:%(lineno)d"
            },
        },
        "handlers": {
            # "()" tells dictConfig to instantiate our custom handler class.
            "console": {
                "()": TqdmStreamHandler,
                "level": "INFO",
                "formatter": "simple",
            },
            "file": {
                "class": "logging.FileHandler",
                "filename": file_path.as_posix(),
                # Append: this config may be applied again within one run,
                # and we want to keep existing file content.
                "mode": "a",
                "formatter": "detailed",
            },
        },
        "root": {
            "level": "DEBUG" if verbose else "INFO",
            "handlers": ["console", "file"],
        },
    }
    config.dictConfig(d)
    msglogger = logging.getLogger()
    msglogger.info(f"Log file for this run: {file_path}")
    return msglogger
def override_opentuner_config():
    """Re-apply our last logging config (opentuner replaces it with its own)
    and remove opentuner's log file if one was written."""
    if _last_applied_config is not None:
        config.dictConfig(_last_applied_config)
    opentuner_log = Path("opentuner.log")
    if opentuner_log.is_file():
        opentuner_log.unlink()
......@@ -8,6 +8,8 @@ from opentuner.measurement.interface import MeasurementInterface
from opentuner.search.manipulator import (ConfigurationManipulator,
EnumParameter)
from ._logging import override_opentuner_config
msg_logger = logging.getLogger(__name__)
KnobsT = Dict[str, str]
PathLike = Union[Path, str]
......@@ -90,8 +92,11 @@ class ApproxTuner:
tuner = TunerInterface(
opentuner_args, self.app, qos_tuner_threshold, qos_keep_threshold, max_iter,
)
trm = TuningRunMain(tuner, opentuner_args)
# TuningRunMain.__init__ initializes its own logger, so we'll override it and use ours
override_opentuner_config()
# This is where opentuner runs
TuningRunMain(tuner, opentuner_args).main()
trm.main()
def get_all_configs(self) -> List[Config]:
from ._dbloader import read_opentuner_db
......
import unittest
import torch
from torch.utils.data.dataset import Subset
from predtuner.approxes import get_knobs_from_file
from predtuner.torchapp import TorchApp
from predtuner.torchutil import accuracy
import torch
from model_zoo import CIFAR, VGG16Cifar10
from predtuner import TorchApp, accuracy, config_pylogger, get_knobs_from_file
from torch.nn import Conv2d, Linear
from torch.utils.data.dataloader import DataLoader
from model_zoo import VGG16Cifar10, CIFAR
from torch.utils.data.dataset import Subset
msg_logger = config_pylogger(output_dir="/tmp", verbose=True)
class TestTorchApp(unittest.TestCase):
def setUp(self):
    """Load a 100-sample CIFAR10 subset and a pretrained VGG16 module."""
    # A merge artifact left the dataset being constructed twice (the old
    # one-line call followed by its reformatted version); load it once.
    dataset = CIFAR.from_file(
        "model_data/cifar10/input.bin", "model_data/cifar10/labels.bin"
    )
    self.dataset = Subset(dataset, range(100))
    self.module = VGG16Cifar10()
    self.module.load_state_dict(torch.load("model_data/vgg16_cifar10.pth.tar"))
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment