Commit 922eeaee authored by Yifan Zhao

Moved example code generation into dnn package

parent 612f1cba
from pathlib import Path
from typing import Union

import torch
from torch.nn import Module
from torch2hpvm import BinDataset, ModelExporter

from .alexnet import AlexNet, AlexNet2, AlexNetImageNet
from .datasets import CIFAR, MNIST, ImageNet
from .lenet import LeNet
from .mobilenet import MobileNet
from .resnet import ResNet18, ResNet50
from .vgg16 import VGG16Cifar10, VGG16Cifar100, VGG16ImageNet
# DNN name -> (DNN class, input_channel, input_size, suggested_batchsize)
benchmarks = {
    "lenet_mnist": (LeNet, 1, 28, 1000),
    "alexnet_cifar10": (AlexNet, 3, 32, 500),
    "alexnet2_cifar10": (AlexNet2, 3, 32, 500),
    "alexnet_imagenet": (AlexNetImageNet, 3, 224, 500),
    "mobilenet_cifar10": (MobileNet, 3, 32, 500),
    "resnet18_cifar10": (ResNet18, 3, 32, 500),
    "resnet50_imagenet": (ResNet50, 3, 224, 25),
    "vgg16_cifar10": (VGG16Cifar10, 3, 32, 500),
    "vgg16_cifar100": (VGG16Cifar100, 3, 32, 500),
    "vgg16_imagenet": (VGG16ImageNet, 3, 224, 10),
}
def export_example_dnn(
    dnn_name: str, output_dir: Union[Path, str], generate_for_tuning: bool
):
    self_folder = Path(__file__).parent.absolute()
    dnn_bench_dir = self_folder / "../.."
    output_dir = Path(output_dir)  # accept str as well as Path
    model_cls, nch, img_size, batch_size = benchmarks[dnn_name]
    dataset_shape = 5000, nch, img_size, img_size
    params = dnn_bench_dir / "model_params" / dnn_name
    bin_tuneset = BinDataset(
        params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
    )
    bin_testset = BinDataset(
        params / "test_input.bin", params / "test_labels.bin", dataset_shape
    )
    # Load the pretrained PyTorch checkpoint for the requested benchmark
    model: Module = model_cls()
    checkpoint = dnn_bench_dir / f"model_params/pytorch/{dnn_name}.pth.tar"
    model.load_state_dict(torch.load(checkpoint.as_posix()))
    build_dir = output_dir / "build"
    target_binary = build_dir / dnn_name
    if generate_for_tuning:
        # Emit a binary the autotuner can inspect
        exporter = ModelExporter(
            model, bin_tuneset, bin_testset, output_dir, target="hpvm_tensor_inspect"
        )
    else:
        # Emit a plain test binary driven by the pre-tuned configuration file
        conf_file = (
            dnn_bench_dir / "hpvm-c/benchmarks" / dnn_name / "data/tuner_confs.txt"
        ).absolute()
        exporter = ModelExporter(
            model, bin_tuneset, bin_testset, output_dir, config_file=conf_file
        )
    exporter.generate(batch_size=batch_size).compile(target_binary, build_dir)
    return target_binary, exporter
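
For reference, a minimal sketch of how this helper is called from the example scripts changed below. The benchmark name and output directory are placeholders; it assumes the dnn package is importable (on sys.path), the model_params data are checked out, and the HPVM toolchain is built.

from pathlib import Path
from shutil import rmtree
from subprocess import run

import dnn  # assumption: the package exposing export_example_dnn

codegen_dir = Path("./alexnet_cifar10")  # placeholder benchmark / output dir
if codegen_dir.exists():
    rmtree(codegen_dir)  # the example scripts clear any previous codegen output

# generate_for_tuning=False: compile a test binary against the pre-tuned
# configuration file and run it on the test set.
target_binary, _ = dnn.export_example_dnn("alexnet_cifar10", codegen_dir, False)
run([str(target_binary), "test"], check=True)

# generate_for_tuning=True builds the tuning variant instead and the returned
# exporter is used by the tuning script to locate the per-op metadata file.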
@@ -5,51 +5,14 @@ from pathlib import Path
from subprocess import run
from sys import argv
import torch
from torch2hpvm import BinDataset, ModelExporter
from torch.nn import Module
site.addsitedir(Path(__file__).parent.parent.absolute())
self_folder = Path(__file__).parent.absolute()
site.addsitedir(self_folder.parent)
import dnn
benchmarks = {
    "lenet_mnist": (dnn.LeNet, 1, 28, 1000),
    "alexnet_cifar10": (dnn.AlexNet, 3, 32, 500),
    "alexnet2_cifar10": (dnn.AlexNet2, 3, 32, 500),
    "alexnet_imagenet": (dnn.AlexNetImageNet, 3, 224, 500),
    "mobilenet_cifar10": (dnn.MobileNet, 3, 32, 500),
    "resnet18_cifar10": (dnn.ResNet18, 3, 32, 500),
    "resnet50_imagenet": (dnn.ResNet50, 3, 224, 25),
    "vgg16_cifar10": (dnn.VGG16Cifar10, 3, 32, 500),
    "vgg16_cifar100": (dnn.VGG16Cifar100, 3, 32, 500),
    "vgg16_imagenet": (dnn.VGG16ImageNet, 3, 224, 10),
}
self_folder = Path(__file__).parent
netname = argv[1]
model_cls, nch, img_size, batch_size = benchmarks[netname]
codegen_dir = Path(f"./{netname}")
print(f"Generating {netname} to {codegen_dir}")
if codegen_dir.exists():
    shutil.rmtree(codegen_dir)
params = self_folder / "../../model_params" / netname
dataset_shape = 5000, nch, img_size, img_size
bin_tuneset = BinDataset(
    params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
)
bin_testset = BinDataset(
    params / "test_input.bin", params / "test_labels.bin", dataset_shape
)
model: Module = model_cls()
checkpoint = self_folder / "../model_params/pytorch" / f"{netname}.pth.tar"
model.load_state_dict(torch.load(checkpoint.as_posix()))
print(model)
build_dir = codegen_dir / "build"
target_binary = build_dir / netname
conf_file = self_folder / "../hpvm-c/benchmarks" / netname / "data/tuner_confs.txt"
exporter = ModelExporter(
    model, bin_tuneset, bin_testset, codegen_dir, config_file=conf_file
)
exporter.generate(batch_size=batch_size).compile(target_binary, build_dir)
target_binary, _ = dnn.export_example_dnn(netname, codegen_dir, False)
run([str(target_binary), "test"], check=True)
@@ -4,61 +4,33 @@ import site
from pathlib import Path
from sys import argv
import torch
from predtuner import config_pylogger
from predtuner.pipedbin import PipedBinaryApp
from torch2hpvm import BinDataset, ModelExporter
from torch.nn import Module
from predtuner import PipedBinaryApp, config_pylogger
site.addsitedir(Path(__file__).parent.parent.absolute())
self_folder = Path(__file__).parent.absolute()
site.addsitedir(self_folder.parent)
import dnn
# Set up logger
msg_logger = config_pylogger(output_dir=".", verbose=True)
benchmarks = {
    "lenet_mnist": (dnn.LeNet, 1, 28, 1000),
    "alexnet_imagenet": (dnn.AlexNetImageNet, 3, 224, 100),
    "mobilenet_cifar10": (dnn.MobileNet, 3, 32, 500),
    "resnet18_cifar10": (dnn.ResNet18, 3, 32, 500),
    "vgg16_cifar10": (dnn.VGG16Cifar10, 3, 32, 500),
}
model_param = Path(__file__).parent / "../../model_params"
def generate(model_cls, nch, img_size, batch_size, netname):
    codegen_dir = Path(f"./{netname}")
    build_dir = codegen_dir / "build"
    metadata_file = codegen_dir / "ops.json"
    binary_file = build_dir / netname
    build_dir = codegen_dir / "build"
    if codegen_dir.exists():
        shutil.rmtree(codegen_dir)
    params = model_param / netname
    dataset_shape = 5000, nch, img_size, img_size
    bin_tuneset = BinDataset(
        params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
    )
    bin_testset = BinDataset(
        params / "test_input.bin", params / "test_labels.bin", dataset_shape
    )
    model: Module = model_cls()
    checkpoint = model_param / f"pytorch/{netname}.pth.tar"
    model.load_state_dict(torch.load(checkpoint.as_posix()))
    exporter = ModelExporter(
        model, bin_tuneset, bin_testset, codegen_dir, target="hpvm_tensor_inspect"
    )
    exporter.generate(batch_size=batch_size).compile(binary_file, build_dir)
    binary_file, exporter = dnn.export_example_dnn(netname, codegen_dir, True)
    metadata_file = codegen_dir / exporter.metadata_file
    return binary_file, metadata_file
def main():
    netname, is_pred = argv[1:]
    is_pred = int(is_pred)
    model_cls, nch, img_size, batch_size = benchmarks[netname]
    binary_file, metadata_file = generate(
        model_cls, nch, img_size, batch_size, netname
    )
    # Generating tunable binary
    codegen_dir = Path(f"./{netname}")
    if codegen_dir.exists():
        shutil.rmtree(codegen_dir)
    binary_file, exporter = dnn.export_example_dnn(netname, codegen_dir, True)
    metadata_file = codegen_dir / exporter.metadata_file_name
    # Tuning
    app = PipedBinaryApp("test", binary_file, metadata_file)
    tuner = app.get_tuner()
    tuner.tune(
......