Commit 0e3fb6bc authored by Hashim Sharif

Improved Quantized Model for VGG-16 CIFAR100

parent 603d6418
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <string.h>
#include "../../../tensor_runtime/include/tensor_runtime.h"
#include "../../include/utils.h"
int main() {

  llvm_hpvm_initTensorRt(0);

  int total_runs = 1;
  for (int i = 0; i < total_runs; i++) {

    startMemTracking();

    int test_input_size = 10000;
    int batch_size = 2500;
    int batch_count = test_input_size / batch_size;
    float final_accuracy = 0.0;

    for (int i = 0; i < batch_count; i++) {

      std::string dir_prefix = std::string("../model_params/vgg16_cifar100_front/");
      std::string input_path = dir_prefix + std::string("input.bin");
      std::string labels_path = dir_prefix + std::string("labels.bin");
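
      // Load the trained weights and biases for the 13 convolutional and the 2
      // fully-connected layers. The trailing integers passed to readTrainedWeights
      // are the tensor dimensions (output channels, input channels, kernel height,
      // kernel width for the convolution filters; 1 x C x 1 x 1 for the biases).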
      std::string conv2d_1_w_path = dir_prefix + std::string("conv2d_1_w.bin");
      void* conv2d_1_w = readTrainedWeights(conv2d_1_w_path.c_str(), 0,64,3,3,3);
      std::string conv2d_1_b_path = dir_prefix + std::string("conv2d_1_b.bin");
      void* conv2d_1_b = readTrainedWeights(conv2d_1_b_path.c_str(), 0,1,64,1,1);
      std::string conv2d_2_w_path = dir_prefix + std::string("conv2d_2_w.bin");
      void* conv2d_2_w = readTrainedWeights(conv2d_2_w_path.c_str(), 0,64,64,3,3);
      std::string conv2d_2_b_path = dir_prefix + std::string("conv2d_2_b.bin");
      void* conv2d_2_b = readTrainedWeights(conv2d_2_b_path.c_str(), 0,1,64,1,1);
      std::string conv2d_3_w_path = dir_prefix + std::string("conv2d_3_w.bin");
      void* conv2d_3_w = readTrainedWeights(conv2d_3_w_path.c_str(), 0,128,64,3,3);
      std::string conv2d_3_b_path = dir_prefix + std::string("conv2d_3_b.bin");
      void* conv2d_3_b = readTrainedWeights(conv2d_3_b_path.c_str(), 0,1,128,1,1);
      std::string conv2d_4_w_path = dir_prefix + std::string("conv2d_4_w.bin");
      void* conv2d_4_w = readTrainedWeights(conv2d_4_w_path.c_str(), 0,128,128,3,3);
      std::string conv2d_4_b_path = dir_prefix + std::string("conv2d_4_b.bin");
      void* conv2d_4_b = readTrainedWeights(conv2d_4_b_path.c_str(), 0,1,128,1,1);
      std::string conv2d_5_w_path = dir_prefix + std::string("conv2d_5_w.bin");
      void* conv2d_5_w = readTrainedWeights(conv2d_5_w_path.c_str(), 0,256,128,3,3);
      std::string conv2d_5_b_path = dir_prefix + std::string("conv2d_5_b.bin");
      void* conv2d_5_b = readTrainedWeights(conv2d_5_b_path.c_str(), 0,1,256,1,1);
      std::string conv2d_6_w_path = dir_prefix + std::string("conv2d_6_w.bin");
      void* conv2d_6_w = readTrainedWeights(conv2d_6_w_path.c_str(), 0,256,256,3,3);
      std::string conv2d_6_b_path = dir_prefix + std::string("conv2d_6_b.bin");
      void* conv2d_6_b = readTrainedWeights(conv2d_6_b_path.c_str(), 0,1,256,1,1);
      std::string conv2d_7_w_path = dir_prefix + std::string("conv2d_7_w.bin");
      void* conv2d_7_w = readTrainedWeights(conv2d_7_w_path.c_str(), 0,256,256,3,3);
      std::string conv2d_7_b_path = dir_prefix + std::string("conv2d_7_b.bin");
      void* conv2d_7_b = readTrainedWeights(conv2d_7_b_path.c_str(), 0,1,256,1,1);
      std::string conv2d_8_w_path = dir_prefix + std::string("conv2d_8_w.bin");
      void* conv2d_8_w = readTrainedWeights(conv2d_8_w_path.c_str(), 0,512,256,3,3);
      std::string conv2d_8_b_path = dir_prefix + std::string("conv2d_8_b.bin");
      void* conv2d_8_b = readTrainedWeights(conv2d_8_b_path.c_str(), 0,1,512,1,1);
      std::string conv2d_9_w_path = dir_prefix + std::string("conv2d_9_w.bin");
      void* conv2d_9_w = readTrainedWeights(conv2d_9_w_path.c_str(), 0,512,512,3,3);
      std::string conv2d_9_b_path = dir_prefix + std::string("conv2d_9_b.bin");
      void* conv2d_9_b = readTrainedWeights(conv2d_9_b_path.c_str(), 0,1,512,1,1);
      std::string conv2d_10_w_path = dir_prefix + std::string("conv2d_10_w.bin");
      void* conv2d_10_w = readTrainedWeights(conv2d_10_w_path.c_str(), 0,512,512,3,3);
      std::string conv2d_10_b_path = dir_prefix + std::string("conv2d_10_b.bin");
      void* conv2d_10_b = readTrainedWeights(conv2d_10_b_path.c_str(), 0,1,512,1,1);
      std::string conv2d_11_w_path = dir_prefix + std::string("conv2d_11_w.bin");
      void* conv2d_11_w = readTrainedWeights(conv2d_11_w_path.c_str(), 0,512,512,3,3);
      std::string conv2d_11_b_path = dir_prefix + std::string("conv2d_11_b.bin");
      void* conv2d_11_b = readTrainedWeights(conv2d_11_b_path.c_str(), 0,1,512,1,1);
      std::string conv2d_12_w_path = dir_prefix + std::string("conv2d_12_w.bin");
      void* conv2d_12_w = readTrainedWeights(conv2d_12_w_path.c_str(), 0,512,512,3,3);
      std::string conv2d_12_b_path = dir_prefix + std::string("conv2d_12_b.bin");
      void* conv2d_12_b = readTrainedWeights(conv2d_12_b_path.c_str(), 0,1,512,1,1);
      std::string conv2d_13_w_path = dir_prefix + std::string("conv2d_13_w.bin");
      void* conv2d_13_w = readTrainedWeights(conv2d_13_w_path.c_str(), 0,512,512,3,3);
      std::string conv2d_13_b_path = dir_prefix + std::string("conv2d_13_b.bin");
      void* conv2d_13_b = readTrainedWeights(conv2d_13_b_path.c_str(), 0,1,512,1,1);
      std::string dense_1_w_path = dir_prefix + std::string("dense_1_w.bin");
      void* dense_1_w = readTrainedWeights(dense_1_w_path.c_str(), 0,1,1,512,512);
      std::string dense_1_b_path = dir_prefix + std::string("dense_1_b.bin");
      void* dense_1_b = readTrainedWeights(dense_1_b_path.c_str(), 0,1,512,1,1);
      std::string dense_2_w_path = dir_prefix + std::string("dense_2_w.bin");
      void* dense_2_w = readTrainedWeights(dense_2_w_path.c_str(), 0,1,1,512,100);
      std::string dense_2_b_path = dir_prefix + std::string("dense_2_b.bin");
      void* dense_2_b = readTrainedWeights(dense_2_b_path.c_str(), 0,1,100,1,1);
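
      // Read the current batch of 3x32x32 CIFAR-100 test images.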
      int start = i * batch_size;
      int end = (i + 1) * batch_size;
      void* input = readInputBatch(input_path.c_str(),0,start,end,3,32,32);
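
      // Each ConvLayer_PROMISE call fuses a convolution with optional pooling and an
      // activation. The (min, max) float pairs following the input, weight, and bias
      // tensors are their quantization ranges; the trailing integer arguments encode
      // padding, strides, pooling type/size, and activation, followed by the output
      // range and the PROMISE precision (swing) level.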
      void* var_0 = ConvLayer_PROMISE(input, -1.7829767, 1.9456929, conv2d_1_w, -0.7450515, 0.71249133, conv2d_1_b, -1.5885142, 0.275554, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.7384350299835205, 9);
      void* var_1 = ConvLayer_PROMISE(var_0, 0.0, 1.7384350299835205, conv2d_2_w, -0.30790088, 0.43504623, conv2d_2_b, -1.4242363, 1.2602744, 1, 1, 1, 1, 0, 2, 1, 0.0, 4.417154796123498, 9);
      void* var_2 = ConvLayer_PROMISE(var_1, 0.0, 4.417154796123498, conv2d_3_w, -0.29189092, 0.26958522, conv2d_3_b, -1.0527138, 0.9075671, 1, 1, 1, 1, -1, 0, 1, 0.0, 3.1919608163833573, 9);
      void* var_3 = ConvLayer_PROMISE(var_2, 0.0, 3.1919608163833573, conv2d_4_w, -0.15521508, 0.1829038, conv2d_4_b, -0.845419, 1.9358484, 1, 1, 1, 1, 0, 2, 1, 0.0, 5.108994026184064, 9);
      void* var_4 = ConvLayer_PROMISE(var_3, 0.0, 5.108994026184064, conv2d_5_w, -0.13149762, 0.14811686, conv2d_5_b, -0.7162557, 1.0370971, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.8264513099193493, 9);
      void* var_5 = ConvLayer_PROMISE(var_4, 0.0, 2.8264513099193493, conv2d_6_w, -0.06236292, 0.08321518, conv2d_6_b, -0.9067523, 0.9922458, 1, 1, 1, 1, -1, 0, 1, 0.0, 2.507186658382409, 9);
      void* var_6 = ConvLayer_PROMISE(var_5, 0.0, 2.507186658382409, conv2d_7_w, -0.06471479, 0.1024472, conv2d_7_b, -0.15943134, 0.7988499, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.550416946411133, 9);
      void* var_7 = ConvLayer_PROMISE(var_6, 0.0, 2.550416946411133, conv2d_8_w, -0.06320205, 0.08291938, conv2d_8_b, -0.32540628, 0.5203079, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.7303829237818675, 9);
      void* var_8 = ConvLayer_PROMISE(var_7, 0.0, 0.7303829237818675, conv2d_9_w, -0.037707984, 0.051601283, conv2d_9_b, -0.25622904, 0.11251946, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.32286912292241965, 9);
      void* var_9 = ConvLayer_PROMISE(var_8, 0.0, 0.32286912292241965, conv2d_10_w, -0.056007143, 0.09549151, conv2d_10_b, -0.11591503, 0.06267536, 1, 1, 1, 1, 0, 2, 1, 0.0, 0.47936276525258825, 9);
      void* var_10 = ConvLayer_PROMISE(var_9, 0.0, 0.47936276525258825, conv2d_11_w, -0.060094673, 0.10868926, conv2d_11_b, -0.105962686, 0.09584572, 1, 1, 1, 1, -1, 0, 1, 0.0, 0.6409912902116734, 9);
      void* var_11 = ConvLayer_PROMISE(var_10, 0.0, 0.6409912902116734, conv2d_12_w, -0.034618977, 0.05792674, conv2d_12_b, -0.4237576, 0.11035452, 1, 1, 1, 1, -1, 0, 1, 0.0, 1.1027569955587349, 9);
      void* var_12 = ConvLayer_PROMISE(var_11, 0.0, 1.1027569955587349, conv2d_13_w, -0.035480656, 0.058295887, conv2d_13_b, -0.21477045, 0.14263579, 1, 1, 1, 1, 0, 2, 1, 0.0, 2.4708798038959503, 9);
      void* var_13 = FCLayer_PROMISE(var_12, 0.0, 2.4708798038959503, dense_1_w, -0.08929961, 0.11301676, dense_1_b, -0.20798548, 0.47405547, 1, 0.0, 2.8148007798194876, 9);
      void* var_14 = FCLayer_PROMISE(var_13, 0.0, 2.8148007798194876, dense_2_w, -0.6627122, 0.35539475, dense_2_b, -1.0631907, 0.9830786, -1, -21.189617557525633, 22.645009384155276, 9);
      void* var_15 = tensorSoftmax(var_14);
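
      // Score this batch against the ground-truth labels (100 CIFAR-100 classes).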
      uint8_t* labels = readLabelsBatch(labels_path.c_str(),start,end);
      float accuracy = computeAccuracy2(labels, batch_size, var_15, 100);
      final_accuracy += accuracy;
      freeBatchMemory();
    }
    final_accuracy = final_accuracy / batch_count;
    dumpFinalAccuracy(final_accuracy);
  }

  dumpExecutionAccuracies();

  llvm_hpvm_cleanupTensorRt();

  return 0;
}