diff --git a/hpvm/test/dnn_benchmarks/pytorch/CMakeLists.txt b/hpvm/test/dnn_benchmarks/pytorch/CMakeLists.txt
index 778593a57ddfc3a6abcc4ed045f02614535739f8..690c62f3878956faa981ad038b576041da0528da 100644
--- a/hpvm/test/dnn_benchmarks/pytorch/CMakeLists.txt
+++ b/hpvm/test/dnn_benchmarks/pytorch/CMakeLists.txt
@@ -1,18 +1,27 @@
-# --[ llvm-lit test setup
-# lit.cfg.py looks for tests in CMAKE_CURRENT_BINARY_DIR (see lit.cfg.py)
-# as most of the tests require some kind of compilation / generation
-# which is best done over there.
+# --[ llvm-lit test setup for test_frontend/
 configure_lit_site_cfg(
   ../../lit.site.cfg.py.in
-  ${CMAKE_CURRENT_BINARY_DIR}/lit.site.cfg.py
+  ${CMAKE_CURRENT_BINARY_DIR}/test_frontend/lit.site.cfg.py
   MAIN_CONFIG
-  ${CMAKE_CURRENT_SOURCE_DIR}/lit.cfg.py
+  ${CMAKE_CURRENT_SOURCE_DIR}/test_frontend/lit.cfg.py
 )
-add_lit_testsuite(check-hpvm-torch2hpvm "Run tests for package torch2hpvm"
-  ${CMAKE_CURRENT_BINARY_DIR}
+add_lit_testsuite(check-hpvm-torch2hpvm "Run tests for HPVM PyTorch frontend"
+  ${CMAKE_CURRENT_BINARY_DIR}/test_frontend
   # We depend on check_dnn_acc.py defined in ../hpvm-c/
   # to compare the inference accuracy of our frontend-generated binary
   # to that of the baseline.
   DEPENDS check_dnn_acc
   ARGS "-j1"  # Run frontend generation sequentially
 )
+
+# --[ llvm-lit test setup for test_tuning/
+configure_lit_site_cfg(
+  ../../lit.site.cfg.py.in
+  ${CMAKE_CURRENT_BINARY_DIR}/test_tuning/lit.site.cfg.py
+  MAIN_CONFIG
+  ${CMAKE_CURRENT_SOURCE_DIR}/test_tuning/lit.cfg.py
+)
+add_lit_testsuite(check-hpvm-tuning "Run tests for the autotuning procedure"
+  ${CMAKE_CURRENT_BINARY_DIR}/test_tuning
+  ARGS "-j1"  # Run tuning tests sequentially
+)
diff --git a/hpvm/test/dnn_benchmarks/pytorch/alexnet2_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/alexnet2_cifar10.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/alexnet2_cifar10.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/alexnet2_cifar10.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/alexnet_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/alexnet_cifar10.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/alexnet_cifar10.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/alexnet_cifar10.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/alexnet_imagenet.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/alexnet_imagenet.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/alexnet_imagenet.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/alexnet_imagenet.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/lenet_mnist.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/lenet_mnist.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/lenet_mnist.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/lenet_mnist.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/lit.cfg.py b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/lit.cfg.py
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/lit.cfg.py
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/lit.cfg.py
diff --git a/hpvm/test/dnn_benchmarks/pytorch/mobilenet_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/mobilenet_cifar10.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/mobilenet_cifar10.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/mobilenet_cifar10.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/resnet18_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/resnet18_cifar10.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/resnet18_cifar10.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/resnet18_cifar10.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/resnet50_imagenet.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/resnet50_imagenet.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/resnet50_imagenet.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/resnet50_imagenet.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_frontend.py b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/test_frontend.py
similarity index 94%
rename from hpvm/test/dnn_benchmarks/pytorch/test_frontend.py
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/test_frontend.py
index 3c20c6ea5a472a693156b4881b58d4e0f1fc8575..56161495d5baa9d570a5d2f36188fe437255de57 100755
--- a/hpvm/test/dnn_benchmarks/pytorch/test_frontend.py
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/test_frontend.py
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-import os
 import shutil
 import site
 from pathlib import Path
@@ -10,7 +9,7 @@ import torch
 from torch2hpvm import BinDataset, ModelExporter
 from torch.nn import Module
 
-site.addsitedir(os.path.dirname(__file__))
+site.addsitedir(Path(__file__).parent.parent.absolute())
 import dnn
 
 benchmarks = {
@@ -33,7 +32,7 @@ print(f"Generating {netname} to {codegen_dir}")
 if codegen_dir.exists():
     shutil.rmtree(codegen_dir)
 
-params = self_folder / "../model_params" / netname
+params = self_folder / "../../model_params" / netname
 dataset_shape = 5000, nch, img_size, img_size
 bin_tuneset = BinDataset(
     params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
diff --git a/hpvm/test/dnn_benchmarks/pytorch/vgg16_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/vgg16_cifar10.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/vgg16_cifar10.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/vgg16_cifar10.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/vgg16_cifar100.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/vgg16_cifar100.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/vgg16_cifar100.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/vgg16_cifar100.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/vgg16_imagenet.test b/hpvm/test/dnn_benchmarks/pytorch/test_frontend/vgg16_imagenet.test
similarity index 100%
rename from hpvm/test/dnn_benchmarks/pytorch/vgg16_imagenet.test
rename to hpvm/test/dnn_benchmarks/pytorch/test_frontend/vgg16_imagenet.test
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning.py b/hpvm/test/dnn_benchmarks/pytorch/test_tuning.py
deleted file mode 100644
index d0451b70b44325a355345ad95ab9bf85154002c5..0000000000000000000000000000000000000000
--- a/hpvm/test/dnn_benchmarks/pytorch/test_tuning.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import os
-import shutil
-import site
-from pathlib import Path
-
-import torch
-from predtuner import config_pylogger
-from predtuner.pipedbin import PipedBinaryApp
-from torch2hpvm import BinDataset, ModelExporter
-from torch.nn import Module
-
-site.addsitedir(os.path.dirname(__file__))
-import dnn
-
-# Set up logger to put log file in /tmp
-msg_logger = config_pylogger(output_dir="/tmp", verbose=True)
-
-
-benchmarks = [
-    (dnn.LeNet, 1, 28, 500, "lenet_mnist"),
-    (dnn.AlexNet, 3, 32, 500, "alexnet_cifar10"),
-    (dnn.AlexNet2, 3, 32, 500, "alexnet2_cifar10"),
-    (dnn.AlexNetImageNet, 3, 224, 100, "alexnet_imagenet"),
-    (dnn.MobileNet, 3, 32, 500, "mobilenet_cifar10"),
-    (dnn.ResNet18, 3, 32, 500, "resnet18_cifar10"),
-    (dnn.ResNet50, 3, 224, 50, "resnet50_imagenet"),
-    (dnn.VGG16Cifar10, 3, 32, 500, "vgg16_cifar10"),
-    (dnn.VGG16Cifar100, 3, 32, 500, "vgg16_cifar100"),
-    (dnn.VGG16ImageNet, 3, 224, 50, "vgg16_imagenet"),
-]
-model_param = Path(__file__).parent / "../model_params"
-
-
-def generate(model_cls, nch, img_size, batch_size, pathname):
-    codegen_dir = Path(f"/tmp/{pathname}_tune")
-    build_dir = codegen_dir / "build"
-    metadata_file = codegen_dir / "ops.json"
-    binary_file = build_dir / pathname
-    build_dir = codegen_dir / "build"
-    # if binary_file.is_file() and metadata_file.is_file():
-    #     return binary_file, metadata_file
-
-    print(f"Generating {pathname} to {codegen_dir}")
-    if codegen_dir.exists():
-        shutil.rmtree(codegen_dir)
-    params = model_param / pathname
-    dataset_shape = 5000, nch, img_size, img_size
-    bin_tuneset = BinDataset(
-        params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
-    )
-    bin_testset = BinDataset(
-        params / "test_input.bin", params / "test_labels.bin", dataset_shape
-    )
-    model: Module = model_cls()
-    checkpoint = model_param / f"{pathname}.pth.tar"
-    model.load_state_dict(torch.load(checkpoint.as_posix()))
-    exporter = ModelExporter(
-        model, bin_tuneset, bin_testset, codegen_dir, target="hpvm_tensor_inspect"
-    )
-    exporter.generate(batch_size=batch_size).compile(binary_file, build_dir)
-    return binary_file, metadata_file
-
-
-def main():
-    for model_cls, nch, img_size, batch_size, pathname in benchmarks:
-        print(f"Testing {pathname}")
-        binary_file, metadata_file = generate(
-            model_cls, nch, img_size, batch_size, pathname
-        )
-        app = PipedBinaryApp("test", binary_file, metadata_file)
-        tuner = app.get_tuner()
-        tuner.tune(100, 3.0, 3.0, True, 50, cost_model="cost_linear")
-        tuner.dump_configs("configs.json")
-        fig = tuner.plot_configs(show_qos_loss=True)
-        fig.savefig("configs.png", dpi=300)
-        app.dump_hpvm_configs(tuner.best_configs, "hpvm_confs.txt")
-
-
-if __name__ == "__main__":
-    main()
\ No newline at end of file
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning/alexnet_imagenet.test b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/alexnet_imagenet.test
new file mode 100644
index 0000000000000000000000000000000000000000..be88fde546f5ce863619b73bb00519994def225c
--- /dev/null
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/alexnet_imagenet.test
@@ -0,0 +1 @@
+RUN: test_tuning.py alexnet_imagenet 0
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning/lenet_mnist_p1.test b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/lenet_mnist_p1.test
new file mode 100644
index 0000000000000000000000000000000000000000..cea2fbf5e7995eb6fa0244ccc69c3ccf3273af27
--- /dev/null
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/lenet_mnist_p1.test
@@ -0,0 +1 @@
+RUN: test_tuning.py lenet_mnist 1
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning/lit.cfg.py b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/lit.cfg.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f026e6c61771bd8483573e93ec001289b500351
--- /dev/null
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/lit.cfg.py
@@ -0,0 +1,32 @@
+# -*- Python -*-
+
+# Configuration file for the 'lit' test runner.
+
+import os
+
+import lit.formats
+from lit.llvm import llvm_config
+
+# name: The name of this test suite.
+config.name = "HPVM-Predtuner"
+
+# testFormat: The test format to use to interpret tests.
+config.test_format = lit.formats.ShTest(False)
+
+# suffixes: A list of file extensions to treat as test files. This is overridden
+# by individual lit.local.cfg files in the test subdirectories.
+config.suffixes = [".test"]
+
+# test_source_root: The root path where tests are located.
+config.test_source_root = os.path.dirname(__file__)
+
+# test_exec_root: The root path where tests should be run.
+current_source_dir = os.path.dirname(os.path.relpath(__file__, config.llvm_src_root))
+current_binary_dir = os.path.join(config.llvm_obj_root, current_source_dir)
+config.test_exec_root = current_binary_dir
+
+# Tweak the PATH to include the tools dir.
+llvm_config.with_environment("PATH", config.llvm_tools_dir, append_path=True)
+
+# Add substitution for our main script in this directory.
+llvm_config.add_tool_substitutions(["test_tuning.py"], config.test_source_root)
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning/mobilenet_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/mobilenet_cifar10.test
new file mode 100644
index 0000000000000000000000000000000000000000..88d5c4a0492a4ffb7b02d8f34e6447006138c558
--- /dev/null
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/mobilenet_cifar10.test
@@ -0,0 +1 @@
+RUN: test_tuning.py mobilenet_cifar10 0
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning/resnet18_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/resnet18_cifar10.test
new file mode 100644
index 0000000000000000000000000000000000000000..4d00d4dae23544aea236c0c9015dca3bdad1252a
--- /dev/null
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/resnet18_cifar10.test
@@ -0,0 +1 @@
+RUN: test_tuning.py resnet18_cifar10 0
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning/test_tuning.py b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/test_tuning.py
new file mode 100755
index 0000000000000000000000000000000000000000..b1ab7ec2ab83487731043a8519c7dd3eb97865f2
--- /dev/null
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/test_tuning.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+import shutil
+import site
+from pathlib import Path
+from sys import argv
+
+import torch
+from predtuner import config_pylogger
+from predtuner.pipedbin import PipedBinaryApp
+from torch2hpvm import BinDataset, ModelExporter
+from torch.nn import Module
+
+site.addsitedir(Path(__file__).parent.parent.absolute())
+import dnn
+
+# Set up logger to put log file in /tmp
+msg_logger = config_pylogger(output_dir="/tmp", verbose=True)
+benchmarks = {
+    "lenet_mnist": (dnn.LeNet, 1, 28, 1000),
+    "alexnet_imagenet": (dnn.AlexNetImageNet, 3, 224, 100),
+    "mobilenet_cifar10": (dnn.MobileNet, 3, 32, 500),
+    "resnet18_cifar10": (dnn.ResNet18, 3, 32, 500),
+    "vgg16_cifar10": (dnn.VGG16Cifar10, 3, 32, 500),
+}
+model_param = Path(__file__).parent / "../../model_params"
+
+
+def generate(model_cls, nch, img_size, batch_size, netname):
+    codegen_dir = Path(f"./{netname}")
+    build_dir = codegen_dir / "build"
+    metadata_file = codegen_dir / "ops.json"
+    binary_file = build_dir / netname
+    build_dir = codegen_dir / "build"
+
+    if codegen_dir.exists():
+        shutil.rmtree(codegen_dir)
+    params = model_param / netname
+    dataset_shape = 5000, nch, img_size, img_size
+    bin_tuneset = BinDataset(
+        params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
+    )
+    bin_testset = BinDataset(
+        params / "test_input.bin", params / "test_labels.bin", dataset_shape
+    )
+    model: Module = model_cls()
+    checkpoint = model_param / f"pytorch/{netname}.pth.tar"
+    model.load_state_dict(torch.load(checkpoint.as_posix()))
+    exporter = ModelExporter(
+        model, bin_tuneset, bin_testset, codegen_dir, target="hpvm_tensor_inspect"
+    )
+    exporter.generate(batch_size=batch_size).compile(binary_file, build_dir)
+    return binary_file, metadata_file
+
+
+def main():
+    netname, is_pred = argv[1:]
+    is_pred = int(is_pred)
+    model_cls, nch, img_size, batch_size = benchmarks[netname]
+    binary_file, metadata_file = generate(
+        model_cls, nch, img_size, batch_size, netname
+    )
+    print(Path.cwd(), binary_file, metadata_file)
+    app = PipedBinaryApp("test", binary_file, metadata_file)
+    tuner = app.get_tuner()
+    tuner.tune(
+        5,
+        3.0,
+        is_threshold_relative=True,
+        cost_model="cost_linear",
+        qos_model="qos_p1" if is_pred else "none",
+    )
+    tuner.dump_configs("configs.json")
+    fig = tuner.plot_configs(show_qos_loss=True)
+    fig.savefig("configs.png", dpi=300)
+    app.dump_hpvm_configs(tuner.best_configs, "hpvm_confs.txt")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/hpvm/test/dnn_benchmarks/pytorch/test_tuning/vgg16_cifar10.test b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/vgg16_cifar10.test
new file mode 100644
index 0000000000000000000000000000000000000000..2084c2bd74dcf09535971c09e228182f5ebf5c61
--- /dev/null
+++ b/hpvm/test/dnn_benchmarks/pytorch/test_tuning/vgg16_cifar10.test
@@ -0,0 +1 @@
+RUN: test_tuning.py vgg16_cifar10 0
diff --git a/hpvm/test/lit.site.cfg.py.in b/hpvm/test/lit.site.cfg.py.in
index 0ed68ccfa0d05e797463dcd2e0a1f9030a20b99a..7f1bd1cd0ef9c007a41bea1f3db41f7abf60f449 100644
--- a/hpvm/test/lit.site.cfg.py.in
+++ b/hpvm/test/lit.site.cfg.py.in
@@ -10,4 +10,5 @@ import lit.llvm
 lit.llvm.initialize(lit_config, config)
 
 # Let the main config do the real work.
-lit_config.load_config(config, "@CMAKE_CURRENT_SOURCE_DIR@/lit.cfg.py")
+# (ARG_MAIN_CONFIG is a variable defined in configure_lit_site_cfg)
+lit_config.load_config(config, "@ARG_MAIN_CONFIG@")