From 319369331dbc0426fe50c3d9143f502f67ce3fe8 Mon Sep 17 00:00:00 2001
From: Yifan Zhao <yifanz16@illinois.edu>
Date: Sat, 13 Mar 2021 22:33:08 -0600
Subject: [PATCH] Updated predtuner and test script

---
 hpvm/projects/predtuner                       |  2 +-
 .../dnn_benchmarks/pytorch/test_tuning.py     | 75 ++++++++++++-------
 2 files changed, 49 insertions(+), 28 deletions(-)

diff --git a/hpvm/projects/predtuner b/hpvm/projects/predtuner
index 6ff4237cf4..cef0789732 160000
--- a/hpvm/projects/predtuner
+++ b/hpvm/projects/predtuner
@@ -1 +1 @@
-Subproject commit 6ff4237cf4386ebb4fcaeb5e448ef6eac8a41c91
+Subproject commit cef07897325f0427246a1f2d71fbf9562656465f
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning.py b/hpvm/test/dnn_benchmarks/pytorch/test_tuning.py
index 32d982187a..d0451b70b4 100644
--- a/hpvm/test/dnn_benchmarks/pytorch/test_tuning.py
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning.py
@@ -2,9 +2,9 @@ import os
 import shutil
 import site
 from pathlib import Path
-from subprocess import Popen
 
 import torch
+from predtuner import config_pylogger
 from predtuner.pipedbin import PipedBinaryApp
 from torch2hpvm import BinDataset, ModelExporter
 from torch.nn import Module
@@ -12,32 +12,38 @@ from torch.nn import Module
 site.addsitedir(os.path.dirname(__file__))
 import dnn
 
+# Set up logger to put log file in /tmp
+msg_logger = config_pylogger(output_dir="/tmp", verbose=True)
+
+
 benchmarks = [
-    (dnn.LeNet, 1, 28, 5000, "lenet_mnist"),
-    (dnn.AlexNet, 3, 32, 5000, "alexnet_cifar10"),
-    (dnn.AlexNet2, 3, 32, 5000, "alexnet2_cifar10"),
-    (dnn.AlexNetImageNet, 3, 224, 500, "alexnet_imagenet"),
-    (dnn.MobileNet, 3, 32, 5000, "mobilenet_cifar10"),
-    (dnn.ResNet18, 3, 32, 5000, "resnet18_cifar10"),
-    (dnn.ResNet50, 3, 224, 100, "resnet50_imagenet"),
-    (dnn.VGG16Cifar10, 3, 32, 5000, "vgg16_cifar10"),
-    (dnn.VGG16Cifar100, 3, 32, 5000, "vgg16_cifar100"),
-    (dnn.VGG16ImageNet, 3, 224, 100, "vgg16_imagenet"),
+    (dnn.LeNet, 1, 28, 500, "lenet_mnist"),
+    (dnn.AlexNet, 3, 32, 500, "alexnet_cifar10"),
+    (dnn.AlexNet2, 3, 32, 500, "alexnet2_cifar10"),
+    (dnn.AlexNetImageNet, 3, 224, 100, "alexnet_imagenet"),
+    (dnn.MobileNet, 3, 32, 500, "mobilenet_cifar10"),
+    (dnn.ResNet18, 3, 32, 500, "resnet18_cifar10"),
+    (dnn.ResNet50, 3, 224, 50, "resnet50_imagenet"),
+    (dnn.VGG16Cifar10, 3, 32, 500, "vgg16_cifar10"),
+    (dnn.VGG16Cifar100, 3, 32, 500, "vgg16_cifar100"),
+    (dnn.VGG16ImageNet, 3, 224, 50, "vgg16_imagenet"),
 ]
+model_param = Path(__file__).parent / "../model_params"
+
+
+def generate(model_cls, nch, img_size, batch_size, pathname):
+    codegen_dir = Path(f"/tmp/{pathname}_tune")
+    build_dir = codegen_dir / "build"
+    metadata_file = codegen_dir / "ops.json"
+    binary_file = build_dir / pathname
+    # build_dir is defined above; binary_file lives inside it
+    # if binary_file.is_file() and metadata_file.is_file():
+    #     return binary_file, metadata_file
 
-self_folder = Path(__file__).parent
-model_cls, nch, img_size, batch_size, pathname = benchmarks[0]
-codegen_dir = Path(f"/tmp/{pathname}_tune")
-build_dir = codegen_dir / "build"
-metadata_file = codegen_dir / "ops.json"
-binary_file = build_dir / pathname
-conf_file = codegen_dir / ModelExporter.config_file_name
-if not binary_file.is_file() or not metadata_file.is_file():
     print(f"Generating {pathname} to {codegen_dir}")
     if codegen_dir.exists():
         shutil.rmtree(codegen_dir)
-
-    params = self_folder / "../model_params" / pathname
+    params = model_param / pathname
     dataset_shape = 5000, nch, img_size, img_size
     bin_tuneset = BinDataset(
         params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
@@ -46,14 +52,29 @@ if not binary_file.is_file() or not metadata_file.is_file():
         params / "test_input.bin", params / "test_labels.bin", dataset_shape
     )
     model: Module = model_cls()
-    checkpoint = self_folder / "../model_params" / f"{pathname}.pth.tar"
+    checkpoint = model_param / f"{pathname}.pth.tar"
     model.load_state_dict(torch.load(checkpoint.as_posix()))
-
     exporter = ModelExporter(
         model, bin_tuneset, bin_testset, codegen_dir, target="hpvm_tensor_inspect"
     )
     exporter.generate(batch_size=batch_size).compile(binary_file, build_dir)
-app = PipedBinaryApp("test", binary_file, metadata_file)
-tuner = app.get_tuner()
-tuner.tune(100, 3.0, is_threshold_relative=True, perf_model="perf_linear", qos_model="qos_p1")
-tuner.dump_configs("configs.json")
+    return binary_file, metadata_file
+
+
+def main():
+    for model_cls, nch, img_size, batch_size, pathname in benchmarks:
+        print(f"Testing {pathname}")
+        binary_file, metadata_file = generate(
+            model_cls, nch, img_size, batch_size, pathname
+        )
+        app = PipedBinaryApp("test", binary_file, metadata_file)
+        tuner = app.get_tuner()
+        tuner.tune(100, 3.0, 3.0, True, 50, cost_model="cost_linear")
+        tuner.dump_configs("configs.json")
+        fig = tuner.plot_configs(show_qos_loss=True)
+        fig.savefig("configs.png", dpi=300)
+        app.dump_hpvm_configs(tuner.best_configs, "hpvm_confs.txt")
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
-- 
GitLab