# NOTE(review): SOURCE is a whitespace-mangled `git diff`; this block reconstructs
# the complete new module ``predtuner/_logging.py`` from the patch (its hunk is
# ``@@ -0,0 +1,93 @@``, so the whole file is visible here).  The same patch also
# adds ``from ._logging import config_pylogger`` to ``predtuner/__init__.py``.
import logging
import time
from logging import config
from pathlib import Path
from typing import Optional, Union

import tqdm

# Accepted by config_pylogger wherever a directory/file location is expected.
PathLike = Union[Path, str]


class TqdmStreamHandler(logging.Handler):
    """tqdm-friendly logging handler.

    Emits records via ``tqdm.tqdm.write`` instead of ``print`` so that log
    lines do not corrupt an active tqdm progress bar.
    """

    def __init__(self, level=logging.NOTSET):
        super().__init__(level)

    def emit(self, record):
        try:
            tqdm.tqdm.write(self.format(record))
            self.flush()
        # Mirror logging.StreamHandler.emit: re-raise RecursionError; let
        # KeyboardInterrupt/SystemExit (BaseException) propagate uncaught.
        except RecursionError:
            raise
        # Was a bare ``except:`` -- narrowed so non-Exception BaseExceptions
        # are no longer swallowed by handleError.
        except Exception:
            self.handleError(record)


# The last dictConfig applied by config_pylogger; replayed by
# override_opentuner_config after opentuner clobbers the root logger.
_last_applied_config: Optional[dict] = None


def config_pylogger(
    filename: str = None, output_dir: PathLike = None, verbose: bool = False
) -> logging.Logger:
    """Configure the root Python logger and return it.

    For each execution of the application we create a unique log file.
    By default the file is named after the current date and time so that
    logs sort by recency; both the filename and the log directory can be
    overridden.

    :param filename: log file name; defaults to a ``%Y.%m.%d-%H%M%S`` timestamp.
    :param output_dir: directory for the log file; created if missing
        (defaults to the current working directory).
    :param verbose: if True the root level is DEBUG, otherwise INFO
        (the console handler stays at INFO either way).
    """
    filename = filename or time.strftime("%Y.%m.%d-%H%M%S")
    output_dir = Path(output_dir or ".")
    # Race-free replacement for ``os.path.exists`` + ``os.makedirs``.
    output_dir.mkdir(parents=True, exist_ok=True)
    file_path = output_dir / filename

    global _last_applied_config
    _last_applied_config = d = {
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "simple": {"format": "%(levelname)s %(name)s: %(message)s"},
            "detailed": {
                "format": "[%(asctime)-15s] %(levelname)7s %(name)s: "
                "%(message)s @%(filename)s:%(lineno)d"
            },
        },
        "handlers": {
            "console": {
                "()": TqdmStreamHandler,
                "level": "INFO",
                "formatter": "simple",
            },
            "file": {
                "class": "logging.FileHandler",
                "filename": file_path.as_posix(),
                # Append: this config may be applied again (see
                # override_opentuner_config) and must not truncate content.
                "mode": "a",
                "formatter": "detailed",
            },
        },
        "root": {
            "level": "DEBUG" if verbose else "INFO",
            "handlers": ["console", "file"],
        },
    }
    config.dictConfig(d)

    msglogger = logging.getLogger()
    msglogger.info(f"Log file for this run: {file_path}")
    return msglogger


def override_opentuner_config():
    """Re-apply our last logging config (opentuner's TuningRunMain installs its
    own) and delete the stray ``opentuner.log`` file it leaves behind."""
    if _last_applied_config is not None:
        config.dictConfig(_last_applied_config)
    if Path("opentuner.log").is_file():
        Path("opentuner.log").unlink()
TuningRunMain(tuner, opentuner_args) + # TuningRunMain.__init__ initializes its own logger, so we'll override it and use ours + override_opentuner_config() # This is where opentuner runs - TuningRunMain(tuner, opentuner_args).main() + trm.main() def get_all_configs(self) -> List[Config]: from ._dbloader import read_opentuner_db diff --git a/test/test_torchapp.py b/test/test_torchapp.py index cb6fd5d689fb485af1b5fd941964c97f12e19212..7fc0eea7b3402b8bfa29640b51c19c333faa315d 100644 --- a/test/test_torchapp.py +++ b/test/test_torchapp.py @@ -1,19 +1,20 @@ import unittest -import torch - -from torch.utils.data.dataset import Subset -from predtuner.approxes import get_knobs_from_file -from predtuner.torchapp import TorchApp -from predtuner.torchutil import accuracy +import torch +from model_zoo import CIFAR, VGG16Cifar10 +from predtuner import TorchApp, accuracy, config_pylogger, get_knobs_from_file from torch.nn import Conv2d, Linear from torch.utils.data.dataloader import DataLoader -from model_zoo import VGG16Cifar10, CIFAR +from torch.utils.data.dataset import Subset + +msg_logger = config_pylogger(output_dir="/tmp", verbose=True) class TestTorchApp(unittest.TestCase): def setUp(self): - dataset = CIFAR.from_file("model_data/cifar10/input.bin", "model_data/cifar10/labels.bin") + dataset = CIFAR.from_file( + "model_data/cifar10/input.bin", "model_data/cifar10/labels.bin" + ) self.dataset = Subset(dataset, range(100)) self.module = VGG16Cifar10() self.module.load_state_dict(torch.load("model_data/vgg16_cifar10.pth.tar"))