Commit 86da50ae authored by Yifan Zhao

Added weights and scripts for miniera dnn

parent fc569c07

Showing with 179 additions and 0 deletions
input: 0.007874015748031496
conv1: 0.041152522047244094
add1: 0.041152522047244094
relu1: 0.03673215196850394
conv2: 0.18990387401574804
add2: 0.18990387401574804
relu2: 0.12851757480314963
pool1: 0.12851757480314963
conv3: 0.4379456692913386
add3: 0.4379456692913386
relu3: 0.13566622047244095
conv4: 0.3754296456692913
add4: 0.3754296456692913
relu4: 0.15238466141732282
pool2: 0.15238466141732282
gemm1: 0.27306629921259845
add5: 0.27306629921259845
relu5: 0.20020370078740157
gemm2: 0.6007037007874017
add6: 0.6007037007874017
softmax1: 0.007874015748031496
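The listing above appears to be the per-layer calibration file (calib.txt) passed to ModelExporter in the export script below; each line maps a layer name to a floating-point scale. A minimal parsing sketch, assuming the "name: value" format shown here (parse_calib is a hypothetical helper, not part of this commit):

from pathlib import Path
from typing import Dict


def parse_calib(path: Path) -> Dict[str, float]:
    """Parse a 'layer: scale' calibration file into a {layer: scale} dict."""
    scales = {}
    for line in path.read_text().splitlines():
        line = line.strip()
        if not line:
            continue
        name, value = line.split(":", 1)
        scales[name.strip()] = float(value)
    return scales


# Example: scales = parse_calib(Path("assets/miniera/calib.txt"))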
[Binary weight files added; contents not human-readable.]
"""Export the MiniERA model to HPVM via torch2hpvm."""
import site
from pathlib import Path

import torch
from torch2hpvm import BinDataset, ModelExporter

self_folder = Path(__file__).parent.absolute()
site.addsitedir(self_folder)  # make the local torch_dnn package importable
from torch_dnn import MiniERA

asset_dir = self_folder / "assets/miniera"
output_dir = Path("/tmp/miniera")

# 5000 CIFAR images stored as float32 in NCHW layout (3 x 32 x 32).
bin_dataset = BinDataset(
    asset_dir / "input.bin", asset_dir / "labels.bin", (5000, 3, 32, 32)
)

# Load the PyTorch checkpoint produced by the legacy-weight conversion script.
model = MiniERA()
model.load_state_dict(torch.load(asset_dir / "miniera.pth"))

# Generate HPVM source and compile it, using the per-layer scales in calib.txt.
exporter = ModelExporter(model, bin_dataset, output_dir, asset_dir / "calib.txt")
exporter.generate().compile(output_dir / "miniera", output_dir)
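The (5000, 3, 32, 32) shape passed to BinDataset implies that input.bin holds 5000 float32 images in NCHW layout and labels.bin holds one int32 label per image, matching the CIFAR class and read_tensor_from_file defaults in torch_dnn/datasets.py below. A quick file-size sanity check under those assumptions (the path here is illustrative only):

from pathlib import Path

import numpy as np

asset_dir = Path("assets/miniera")  # adjust to the actual asset location

n_images, c, h, w = 5000, 3, 32, 32
expected_inputs = n_images * c * h * w * np.dtype(np.float32).itemsize
expected_labels = n_images * np.dtype(np.int32).itemsize

assert (asset_dir / "input.bin").stat().st_size == expected_inputs
assert (asset_dir / "labels.bin").stat().st_size == expected_labels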
"""Make PyTorch checkpoint of MiniERA model from legacy HPVM weights."""
import site
from pathlib import Path
import torch
self_folder = Path(__file__).parent.absolute()
site.addsitedir(self_folder)
from torch_dnn import CIFAR, MiniERA
@torch.no_grad()
def main():
prefix = self_folder / "assets/miniera"
model = MiniERA().load_legacy_hpvm_weights(prefix)
# Test mini ERA
dataset = CIFAR.from_file(prefix / "input.bin", prefix / "labels.bin")
dataloader = torch.utils.data.DataLoader(dataset, batch_size=1, shuffle=False)
correct = 0
total = 0
for data in dataloader:
images, labels = data[0], data[1]
outputs = model(images)
_, predicted = torch.max(outputs.data, 1)
total += labels.size(0)
correct += (predicted == labels).sum().item()
print(f"Accuracy of the network on the test images: {100 * correct / total} %")
torch.save(model.state_dict(), prefix / "miniera.pth")
if __name__ == "__main__":
main()
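The per-image evaluation loop above is simple but slow. An equivalent batched variant is sketched here, assuming MiniERA accepts batched NCHW input (which the DataLoader usage above implies); the evaluate helper is illustrative and not part of this commit:

import torch
from torch.utils.data import DataLoader


@torch.no_grad()
def evaluate(model, dataset, batch_size=128):
    """Return the top-1 accuracy (%) of `model` over `dataset`."""
    loader = DataLoader(dataset, batch_size=batch_size, shuffle=False)
    correct = total = 0
    for images, labels in loader:
        predicted = model(images).argmax(dim=1)
        total += labels.size(0)
        correct += (predicted == labels).sum().item()
    return 100 * correct / total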
from .datasets import CIFAR
from .miniera import MiniERA
import logging
from pathlib import Path
from typing import Iterator, Tuple, Union

import numpy as np
import torch
from torch.utils.data.dataset import Dataset

RetT = Tuple[torch.Tensor, torch.Tensor]
msg_logger = logging.getLogger(__name__)
PathLike = Union[Path, str]


class SingleFileDataset(Dataset):
    def __init__(self, inputs: torch.Tensor, outputs: torch.Tensor):
        self.inputs, self.outputs = inputs, outputs

    @classmethod
    def from_file(cls, *args, **kwargs):
        pass

    @property
    def sample_input(self):
        inputs, outputs = next(iter(self))
        return inputs

    def __len__(self) -> int:
        return len(self.inputs)

    def __getitem__(self, idx) -> RetT:
        return self.inputs[idx], self.outputs[idx]

    def __iter__(self) -> Iterator[RetT]:
        for i in range(len(self)):
            yield self[i]


class DNNDataset(SingleFileDataset):
    image_shape = None
    label_ty = np.int32

    @classmethod
    def from_file(
        cls,
        input_file: PathLike,
        labels_file: PathLike,
        count: int = -1,
        offset: int = 0,
    ):
        # NOTE: assuming (N, *) ordering of inputs (such as NCHW, NHWC)
        channel_size = np.prod(np.array(cls.image_shape))
        inputs_count = -1 if count == -1 else count * channel_size
        inputs = read_tensor_from_file(
            input_file,
            -1,
            *cls.image_shape,
            count=inputs_count,
            offset=offset * channel_size,
        )
        labels = read_tensor_from_file(
            labels_file,
            -1,
            read_ty=cls.label_ty,
            cast_ty=np.int64,  # np.long is removed in newer NumPy versions
            count=count,
            offset=offset,
        )
        if inputs.shape[0] != labels.shape[0]:
            raise ValueError("Input and output have different number of data points")
        msg_logger.info("%d entries loaded from dataset.", inputs.shape[0])
        return cls(inputs, labels)


class MNIST(DNNDataset):
    image_shape = 1, 28, 28


class CIFAR(DNNDataset):
    image_shape = 3, 32, 32


class ImageNet(DNNDataset):
    image_shape = 3, 224, 224


def read_tensor_from_file(
    filename: Union[str, Path],
    *shape: int,
    read_ty=np.float32,
    cast_ty=np.float32,
    count: int = -1,
    offset: int = 0,
) -> torch.Tensor:
    # `count` and `offset` are in elements of `read_ty`, not bytes.
    offset = offset * read_ty().itemsize
    mmap = np.memmap(filename, dtype=read_ty, mode="r", offset=offset)
    n_entries = min(mmap.shape[0], count) if count != -1 else mmap.shape[0]
    np_array = mmap[:n_entries].reshape(shape).astype(cast_ty)
    return torch.from_numpy(np_array).clone()
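For reference, calling read_tensor_from_file directly mirrors what CIFAR.from_file does internally. A small usage sketch, assuming the asset paths and 5000-image count used by the export script above:

from pathlib import Path

import numpy as np
from torch_dnn.datasets import read_tensor_from_file

prefix = Path("assets/miniera")  # assumed asset location

# Labels are stored as int32 on disk and cast to int64 for PyTorch.
labels = read_tensor_from_file(prefix / "labels.bin", -1, read_ty=np.int32, cast_ty=np.int64)

# Inputs are float32 images reshaped to (N, 3, 32, 32).
images = read_tensor_from_file(prefix / "input.bin", -1, 3, 32, 32)

assert images.shape[0] == labels.shape[0] == 5000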