Skip to content
Snippets Groups Projects
Commit 6ef2c37e authored by Yifan Zhao's avatar Yifan Zhao
Browse files

Changed communication protocol

parent c9da92cf
No related branches found
No related tags found
No related merge requests found
...@@ -154,8 +154,6 @@ class ModelExporter: ...@@ -154,8 +154,6 @@ class ModelExporter:
"knob_speedup": knob_speedup, "knob_speedup": knob_speedup,
"op_knobs": op_knobs, "op_knobs": op_knobs,
"baseline_knob": baseline_knob, "baseline_knob": baseline_knob,
"tune_args": "tune",
"test_args": "test",
}, },
f, f,
indent=2, indent=2,
......
...@@ -10,14 +10,16 @@ ...@@ -10,14 +10,16 @@
#include <sys/stat.h> #include <sys/stat.h>
/**** Routines for Handling Piped Execution ***/ /**** Routines for Handling Piped Execution ***/
// Block until the driver process writes a command line into the FIFO file,
// then decode it into a run mode.
//
// Protocol (post-commit): the driver writes one of
//   "test" -> returns 1 (run on the test dataset)
//   "tune" -> returns 2 (run on the tune dataset)
//   "stop" -> returns 0 (terminate the measurement loop)
// Any other content is a protocol violation: print it and abort().
//
// NOTE(review): opening a FIFO for reading blocks until a writer connects,
// which is what provides the "wait" semantics here — confirm the file at
// `filename` is created via mkfifo (see make_fifo) and not a regular file.
int fifo_wait(const std::string &filename) {
  // BUG FIX: the stream must be opened on `filename`; the original text
  // read `std::ifstream file(unknown);`, referencing an undeclared name.
  std::ifstream file(filename);
  std::string line;
  std::getline(file, line);
  if (line == "test")
    return 1;
  if (line == "tune")
    return 2;
  if (line == "stop")
    return 0;
  std::cout << "Invalid fifo file content \"" << line << "\"\n";
  abort();
}
...@@ -33,15 +35,6 @@ void make_fifo(const std::string &filename) { ...@@ -33,15 +35,6 @@ void make_fifo(const std::string &filename) {
file << "{{conf_path}}\n"; // Write path to config file in FIFO file file << "{{conf_path}}\n"; // Write path to config file in FIFO file
return; return;
} }
if (errno == EEXIST) {
if (unlink(filename.c_str()) < 0) {
std::cout << "Error removing existing file: " << strerror(errno) << '\n';
abort();
}
make_fifo(filename);
return;
}
std::cout << "Error making FIFO file: " << strerror(errno) << '\n'; std::cout << "Error making FIFO file: " << strerror(errno) << '\n';
abort(); abort();
} }
...@@ -105,20 +98,13 @@ typedef struct __attribute__((__packed__)) { ...@@ -105,20 +98,13 @@ typedef struct __attribute__((__packed__)) {
const int batch_size = {{batch_size}}, input_size = {{input_size}}, batch_count = input_size / batch_size; const int batch_size = {{batch_size}}, input_size = {{input_size}}, batch_count = input_size / batch_size;
int main(int argc, char *argv[]){ int main(){
if (argc != 2) {
std::cout << "Usage: " << argv[0] << " {tune|test}\n";
return 1;
}
std::string arg1 = argv[1];
if (arg1 != "tune" && arg1 != "test") {
std::cout << "Usage: " << argv[0] << " {tune|test}\n";
return 1;
}
std::string dir_prefix = "{{prefix}}/"; std::string dir_prefix = "{{prefix}}/";
std::string input_path = dir_prefix + arg1 + "_input.bin"; std::string test_input = dir_prefix + "test_input.bin";
std::string labels_path = dir_prefix + arg1 + "_labels.bin"; std::string test_labels = dir_prefix + "test_labels.bin";
std::string tune_input = dir_prefix + "tune_input.bin";
std::string tune_labels = dir_prefix + "tune_labels.bin";
{% for w in weights %} {% for w in weights %}
std::string {{w.name}}_path = dir_prefix + "{{w.filename}}"; std::string {{w.name}}_path = dir_prefix + "{{w.filename}}";
void* {{w.name}} = readTrainedWeights({{w.name}}_path.c_str(), 0, {{w.shape|join(', ')}}); void* {{w.name}} = readTrainedWeights({{w.name}}_path.c_str(), 0, {{w.shape|join(', ')}});
...@@ -132,19 +118,23 @@ int main(int argc, char *argv[]){ ...@@ -132,19 +118,23 @@ int main(int argc, char *argv[]){
{% endfor %} {% endfor %}
make_fifo("{{fifo_path}}"); make_fifo("{{fifo_path}}");
while (fifo_wait("{{fifo_path}}")) { int ret = 0;
while ((ret = fifo_wait("{{fifo_path}}"))) {
__hpvm__init(); __hpvm__init();
startMemTracking(); startMemTracking();
const auto *input_pth = (ret == 1 ? test_input : tune_input).c_str();
const auto *labels_pth = (ret == 1 ? test_labels : tune_labels).c_str();
for (int i = 0; i < batch_count; i++){ for (int i = 0; i < batch_count; i++){
int start = i * batch_size, end = start + batch_size; int start = i * batch_size, end = start + batch_size;
copyInputBatch(input_path.c_str(), start, end, {{input_shape|join(', ')}}, {{input_name}}); copyInputBatch(input_pth, start, end, {{input_shape|join(', ')}}, {{input_name}});
void* dfg = __hpvm__launch(0, root, (void*) args); void* dfg = __hpvm__launch(0, root, (void*) args);
__hpvm__wait(dfg); __hpvm__wait(dfg);
void *result = static_cast<RootIn*>(args)->r.tensor; void *result = static_cast<RootIn*>(args)->r.tensor;
hpvm_request_tensor(result, 0); hpvm_request_tensor(result, 0);
llvm_hpvm_invokeRtControl(result, labels_path.c_str(), start, end); llvm_hpvm_invokeRtControl(result, labels_pth, start, end);
freeBatchMemory(); freeBatchMemory();
} }
__hpvm__cleanup(); __hpvm__cleanup();
......
import shutil
from pathlib import Path
from subprocess import run
import torch
from torch2hpvm import BinDataset, ModelExporter
from torch.nn import Module
from predtuner.pipedbin import PipedBinaryApp
import os import os
import shutil import shutil
import site import site
from pathlib import Path from pathlib import Path
from subprocess import run from subprocess import Popen
import torch
import torch
from predtuner.pipedbin import PipedBinaryApp
from torch2hpvm import BinDataset, ModelExporter from torch2hpvm import BinDataset, ModelExporter
from torch.nn import Module from torch.nn import Module
...@@ -37,29 +28,30 @@ benchmarks = [ ...@@ -37,29 +28,30 @@ benchmarks = [
self_folder = Path(__file__).parent self_folder = Path(__file__).parent
model_cls, nch, img_size, batch_size, pathname = benchmarks[0] model_cls, nch, img_size, batch_size, pathname = benchmarks[0]
codegen_dir = Path(f"/tmp/{pathname}_tune") codegen_dir = Path(f"/tmp/{pathname}_tune")
print(f"Generating {pathname} to {codegen_dir}")
if codegen_dir.exists():
shutil.rmtree(codegen_dir)
params = self_folder / "../model_params" / pathname
dataset_shape = 5000, nch, img_size, img_size
bin_tuneset = BinDataset(
params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
)
bin_testset = BinDataset(
params / "test_input.bin", params / "test_labels.bin", dataset_shape
)
model: Module = model_cls()
checkpoint = self_folder / "../model_params" / f"{pathname}.pth.tar"
model.load_state_dict(torch.load(checkpoint.as_posix()))
build_dir = codegen_dir / "build" build_dir = codegen_dir / "build"
target_binary = build_dir / pathname metadata_file = codegen_dir / "ops.json"
exporter = ModelExporter( binary_file = build_dir / pathname
model, bin_tuneset, bin_testset, codegen_dir, target="hpvm_tensor_inspect" conf_file = codegen_dir / ModelExporter.config_file_name
) if not binary_file.is_file() or not metadata_file.is_file():
exporter.generate(batch_size=batch_size).compile(target_binary, build_dir) print(f"Generating {pathname} to {codegen_dir}")
run([str(target_binary), "test"], check=True) if codegen_dir.exists():
shutil.rmtree(codegen_dir)
# build_dir = codegen_dir / "build"
# print(PipedBinaryApp("test", codegen_dir / "ops.json", build_dir / "lenet_mnist", build_dir)) params = self_folder / "../model_params" / pathname
dataset_shape = 5000, nch, img_size, img_size
bin_tuneset = BinDataset(
params / "tune_input.bin", params / "tune_labels.bin", dataset_shape
)
bin_testset = BinDataset(
params / "test_input.bin", params / "test_labels.bin", dataset_shape
)
model: Module = model_cls()
checkpoint = self_folder / "../model_params" / f"{pathname}.pth.tar"
model.load_state_dict(torch.load(checkpoint.as_posix()))
exporter = ModelExporter(
model, bin_tuneset, bin_testset, codegen_dir, target="hpvm_tensor_inspect"
)
exporter.generate(batch_size=batch_size).compile(binary_file, build_dir)
app = PipedBinaryApp("test", codegen_dir, "ops.json", f"build/{pathname}", "hpvm_fifo")
app.measure_qos_perf({}, False)
Loading…
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment