From f602d4b547fb7f06227f7dc8deae0482f55a585b Mon Sep 17 00:00:00 2001
From: Yifan Zhao <yifanz16@illinois.edu>
Date: Sat, 23 Jan 2021 07:10:28 -0600
Subject: [PATCH] Add logging utils
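
predtuner/_logging.py provides two pieces: TqdmStreamHandler, a logging
handler that writes through tqdm.write() so active progress bars are not
garbled, and config_pylogger(), which configures the root logger with a
console handler plus a per-run, timestamped log file.

OpenTuner's TuningRunMain.__init__ installs its own logging config, so
ApproxTuner now re-applies ours via override_opentuner_config() (which
also removes OpenTuner's opentuner.log, if present) after constructing
TuningRunMain and before calling main().

Typical usage, as exercised in test/test_torchapp.py:

    from predtuner import config_pylogger
    msg_logger = config_pylogger(output_dir="/tmp", verbose=True)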

---
 predtuner/__init__.py  |  1 +
 predtuner/_logging.py  | 93 ++++++++++++++++++++++++++++++++++++++++++
 predtuner/approxapp.py |  7 +++-
 test/test_torchapp.py  | 17 ++++----
 4 files changed, 109 insertions(+), 9 deletions(-)
 create mode 100644 predtuner/_logging.py

diff --git a/predtuner/__init__.py b/predtuner/__init__.py
index 9d13bc6..9237250 100644
--- a/predtuner/__init__.py
+++ b/predtuner/__init__.py
@@ -1,3 +1,4 @@
+from ._logging import config_pylogger
 from .approxapp import ApproxApp, ApproxKnob, ApproxTuner
 from .approxes import get_knobs_from_file
 from .modeledapp import (IPerfModel, IQoSModel, LinearPerfModel, ModeledApp,
diff --git a/predtuner/_logging.py b/predtuner/_logging.py
new file mode 100644
index 0000000..9623135
--- /dev/null
+++ b/predtuner/_logging.py
@@ -0,0 +1,93 @@
+import logging
+import time
+from logging import config
+from pathlib import Path
+from typing import Optional, Union
+
+import tqdm
+
+PathLike = Union[Path, str]
+
+
+class TqdmStreamHandler(logging.Handler):
+    """tqdm-friendly logging handler. Uses tqdm.write instead of print for logging."""
+
+    def __init__(self, level=logging.NOTSET):
+        super().__init__(level)
+
+    def emit(self, record):
+        try:
+            msg = self.format(record)
+            tqdm.tqdm.write(msg)
+            self.flush()
+        except (KeyboardInterrupt, SystemExit, RecursionError):
+            raise
+        except Exception:
+            self.handleError(record)
+
+
+# The config dict last passed to dictConfig(), kept so override_opentuner_config() can re-apply it
+_last_applied_config: Optional[dict] = None
+
+
+def config_pylogger(
+    filename: Optional[str] = None, output_dir: Optional[PathLike] = None, verbose: bool = False
+) -> logging.Logger:
+    """Configure the Python logger.
+
+    For each execution of the application, we'd like to create a unique log file.
+    By default this file is named using the date and time of day, so that it can be sorted by recency.
+    You can also name your filename or choose the log directory.
+    """
+    # Default to a timestamp-based filename so per-run logs sort by recency
+    timestr = time.strftime("%Y.%m.%d-%H%M%S")
+    filename = filename or timestr
+    output_dir = Path(output_dir or ".")
+    # Create the log directory if it does not already exist
+    output_dir.mkdir(parents=True, exist_ok=True)
+    # Full path of this run's log file
+    file_path = output_dir / filename
+
+    global _last_applied_config
+    _last_applied_config = d = {
+        "version": 1,
+        "disable_existing_loggers": False,
+        "formatters": {
+            "simple": {"format": "%(levelname)s %(name)s: " "%(message)s"},
+            "detailed": {
+                "format": "[%(asctime)-15s] "
+                "%(levelname)7s %(name)s: "
+                "%(message)s "
+                "@%(filename)s:%(lineno)d"
+            },
+        },
+        "handlers": {
+            "console": {
+                "()": TqdmStreamHandler,
+                "level": "INFO",
+                "formatter": "simple",
+            },
+            "file": {
+                "class": "logging.FileHandler",
+                "filename": file_path.as_posix(),
+                "mode": "a",  # Because we may apply this config again, want to keep existing content
+                "formatter": "detailed",
+            },
+        },
+        "root": {
+            "level": "DEBUG" if verbose else "INFO",
+            "handlers": ["console", "file"],
+        },
+    }
+    config.dictConfig(d)
+
+    msglogger = logging.getLogger()
+    msglogger.info(f"Log file for this run: {file_path}")
+    return msglogger
+
+
+def override_opentuner_config():
+    if _last_applied_config is not None:
+        config.dictConfig(_last_applied_config)
+    if Path("opentuner.log").is_file():
+        Path("opentuner.log").unlink()
diff --git a/predtuner/approxapp.py b/predtuner/approxapp.py
index 192cfdb..2c51256 100644
--- a/predtuner/approxapp.py
+++ b/predtuner/approxapp.py
@@ -8,6 +8,8 @@ from opentuner.measurement.interface import MeasurementInterface
 from opentuner.search.manipulator import (ConfigurationManipulator,
                                           EnumParameter)
 
+from ._logging import override_opentuner_config
+
 msg_logger = logging.getLogger(__name__)
 KnobsT = Dict[str, str]
 PathLike = Union[Path, str]
@@ -90,8 +92,11 @@ class ApproxTuner:
         tuner = TunerInterface(
             opentuner_args, self.app, qos_tuner_threshold, qos_keep_threshold, max_iter,
         )
+        trm = TuningRunMain(tuner, opentuner_args)
+        # TuningRunMain.__init__ installs OpenTuner's own logging config; override it with ours
+        override_opentuner_config()
         # This is where opentuner runs
-        TuningRunMain(tuner, opentuner_args).main()
+        trm.main()
 
     def get_all_configs(self) -> List[Config]:
         from ._dbloader import read_opentuner_db
diff --git a/test/test_torchapp.py b/test/test_torchapp.py
index cb6fd5d..7fc0eea 100644
--- a/test/test_torchapp.py
+++ b/test/test_torchapp.py
@@ -1,19 +1,20 @@
 import unittest
-import torch
-
-from torch.utils.data.dataset import Subset
 
-from predtuner.approxes import get_knobs_from_file
-from predtuner.torchapp import TorchApp
-from predtuner.torchutil import accuracy
+import torch
+from model_zoo import CIFAR, VGG16Cifar10
+from predtuner import TorchApp, accuracy, config_pylogger, get_knobs_from_file
 from torch.nn import Conv2d, Linear
 from torch.utils.data.dataloader import DataLoader
-from model_zoo import VGG16Cifar10, CIFAR
+from torch.utils.data.dataset import Subset
+
+msg_logger = config_pylogger(output_dir="/tmp", verbose=True)
 
 
 class TestTorchApp(unittest.TestCase):
     def setUp(self):
-        dataset = CIFAR.from_file("model_data/cifar10/input.bin", "model_data/cifar10/labels.bin")
+        dataset = CIFAR.from_file(
+            "model_data/cifar10/input.bin", "model_data/cifar10/labels.bin"
+        )
         self.dataset = Subset(dataset, range(100))
         self.module = VGG16Cifar10()
         self.module.load_state_dict(torch.load("model_data/vgg16_cifar10.pth.tar"))
-- 
GitLab